From 776c8188820b93881d8a05d2223bbe25746a97e6 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Wed, 10 Dec 2025 06:35:11 +0200 Subject: [PATCH 1/3] Lay the basics for non-`Copy` solver types with GC That means stop using Salsa for interning solver types. --- Cargo.lock | 1 + crates/hir-def/src/nameres/attr_resolution.rs | 4 +- crates/hir-def/src/nameres/collector.rs | 2 +- crates/hir-ty/src/consteval.rs | 149 ++-- crates/hir-ty/src/consteval/tests.rs | 10 +- crates/hir-ty/src/db.rs | 130 ++-- crates/hir-ty/src/diagnostics/expr.rs | 12 +- crates/hir-ty/src/diagnostics/match_check.rs | 17 +- .../diagnostics/match_check/pat_analysis.rs | 4 +- crates/hir-ty/src/diagnostics/unsafe_check.rs | 22 +- crates/hir-ty/src/display.rs | 50 +- crates/hir-ty/src/drop.rs | 6 +- crates/hir-ty/src/dyn_compatibility.rs | 3 +- crates/hir-ty/src/infer.rs | 337 ++++----- crates/hir-ty/src/infer/autoderef.rs | 7 +- crates/hir-ty/src/infer/cast.rs | 20 +- crates/hir-ty/src/infer/closure/analysis.rs | 88 +-- crates/hir-ty/src/infer/coerce.rs | 64 +- crates/hir-ty/src/infer/diagnostics.rs | 16 +- crates/hir-ty/src/infer/expr.rs | 88 +-- crates/hir-ty/src/infer/mutability.rs | 4 +- crates/hir-ty/src/infer/op.rs | 4 +- crates/hir-ty/src/infer/opaques.rs | 4 +- crates/hir-ty/src/infer/pat.rs | 35 +- crates/hir-ty/src/infer/path.rs | 2 +- crates/hir-ty/src/infer/place_op.rs | 15 +- crates/hir-ty/src/infer/unify.rs | 3 +- crates/hir-ty/src/inhabitedness.rs | 2 +- crates/hir-ty/src/layout.rs | 68 +- crates/hir-ty/src/layout/adt.rs | 24 +- crates/hir-ty/src/layout/tests.rs | 6 +- crates/hir-ty/src/lib.rs | 19 +- crates/hir-ty/src/lower.rs | 688 ++++++++++-------- crates/hir-ty/src/lower/path.rs | 14 +- crates/hir-ty/src/method_resolution.rs | 2 +- .../hir-ty/src/method_resolution/confirm.rs | 22 +- crates/hir-ty/src/method_resolution/probe.rs | 2 +- crates/hir-ty/src/mir.rs | 261 ++++--- crates/hir-ty/src/mir/borrowck.rs | 123 ++-- crates/hir-ty/src/mir/eval.rs | 301 ++++---- crates/hir-ty/src/mir/eval/shim.rs | 31 +- crates/hir-ty/src/mir/eval/shim/simd.rs | 3 +- crates/hir-ty/src/mir/eval/tests.rs | 9 +- crates/hir-ty/src/mir/lower.rs | 327 ++++----- crates/hir-ty/src/mir/lower/as_place.rs | 54 +- .../hir-ty/src/mir/lower/pattern_matching.rs | 115 +-- crates/hir-ty/src/mir/monomorphization.rs | 106 +-- crates/hir-ty/src/mir/pretty.rs | 48 +- crates/hir-ty/src/next_solver.rs | 2 + crates/hir-ty/src/next_solver/binder.rs | 83 +++ crates/hir-ty/src/next_solver/consts.rs | 115 ++- crates/hir-ty/src/next_solver/fulfill.rs | 2 +- .../hir-ty/src/next_solver/fulfill/errors.rs | 2 +- crates/hir-ty/src/next_solver/generic_arg.rs | 357 +++++++-- crates/hir-ty/src/next_solver/infer/at.rs | 2 +- .../infer/canonical/canonicalizer.rs | 4 +- .../infer/canonical/instantiate.rs | 2 +- .../next_solver/infer/outlives/obligations.rs | 10 +- crates/hir-ty/src/next_solver/interner.rs | 430 ++++++----- crates/hir-ty/src/next_solver/ir_print.rs | 1 - crates/hir-ty/src/next_solver/opaques.rs | 59 +- crates/hir-ty/src/next_solver/predicate.rs | 285 ++++---- crates/hir-ty/src/next_solver/region.rs | 66 +- crates/hir-ty/src/next_solver/solver.rs | 2 +- crates/hir-ty/src/next_solver/ty.rs | 88 ++- crates/hir-ty/src/opaques.rs | 44 +- crates/hir-ty/src/specialization.rs | 1 - crates/hir-ty/src/tests.rs | 26 +- crates/hir-ty/src/tests/closure_captures.rs | 1 + crates/hir-ty/src/tests/incremental.rs | 100 +-- crates/hir-ty/src/traits.rs | 29 +- crates/hir-ty/src/variance.rs | 58 +- crates/hir/src/diagnostics.rs | 21 +- crates/hir/src/lib.rs | 
86 ++- crates/hir/src/semantics.rs | 7 +- crates/hir/src/source_analyzer.rs | 101 ++- crates/ide/src/inlay_hints/implicit_drop.rs | 2 +- crates/ide/src/interpret.rs | 2 +- crates/intern/Cargo.toml | 1 + crates/intern/src/intern.rs | 331 +++++++++ crates/intern/src/intern_slice.rs | 351 +++++++++ crates/intern/src/lib.rs | 221 +----- crates/macros/src/lib.rs | 30 + crates/query-group-macro/src/queries.rs | 3 +- .../rust-analyzer/src/cli/analysis_stats.rs | 33 +- 85 files changed, 3772 insertions(+), 2508 deletions(-) create mode 100644 crates/hir-ty/src/next_solver/binder.rs create mode 100644 crates/intern/src/intern.rs create mode 100644 crates/intern/src/intern_slice.rs diff --git a/Cargo.lock b/Cargo.lock index 70611dd1311b..081c7d66f1d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1215,6 +1215,7 @@ dependencies = [ "dashmap", "hashbrown 0.14.5", "rustc-hash 2.1.1", + "smallvec", "triomphe", ] diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs index ec05c02bd635..1cbd2c10b5a1 100644 --- a/crates/hir-def/src/nameres/attr_resolution.rs +++ b/crates/hir-def/src/nameres/attr_resolution.rs @@ -63,7 +63,7 @@ impl DefMap { return Ok(ResolvedAttr::Other); } } - None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }), + None => return Err(UnresolvedMacro { path: (*ast_id.path).clone() }), }; Ok(ResolvedAttr::Macro(attr_macro_as_call_id( @@ -145,7 +145,7 @@ pub(super) fn derive_macro_as_call_id( ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { let (macro_id, def_id) = resolver(&item_attr.path) .filter(|(_, def_id)| def_id.is_derive()) - .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?; + .ok_or_else(|| UnresolvedMacro { path: (*item_attr.path).clone() })?; let call_id = def_id.make_call( db, krate, diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 10581378bef9..7e1ec526a7bc 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1675,7 +1675,7 @@ impl<'db> DefCollector<'db> { derive_index: *derive_pos as u32, derive_macro_id: *derive_macro_id, }, - ast_id.path.as_ref().clone(), + (*ast_id.path).clone(), )); } // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 32c207b189f0..f11240e0f78c 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -23,8 +23,9 @@ use crate::{ mir::{MirEvalError, MirLowerError}, next_solver::{ Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, - ParamEnv, Ty, ValueConst, + ParamEnv, StoredConst, StoredGenericArgs, Ty, ValueConst, }, + traits::StoredParamEnvAndCrate, }; use super::mir::{interpret_mir, lower_to_mir, pad16}; @@ -38,12 +39,12 @@ pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> { } #[derive(Debug, Clone, PartialEq, Eq)] -pub enum ConstEvalError<'db> { - MirLowerError(MirLowerError<'db>), - MirEvalError(MirEvalError<'db>), +pub enum ConstEvalError { + MirLowerError(MirLowerError), + MirEvalError(MirEvalError), } -impl ConstEvalError<'_> { +impl ConstEvalError { pub fn pretty_print( &self, f: &mut String, @@ -62,8 +63,8 @@ impl ConstEvalError<'_> { } } -impl<'db> From> for ConstEvalError<'db> { - fn from(value: MirLowerError<'db>) -> Self { +impl From for ConstEvalError { + fn from(value: MirLowerError) -> Self { match value { 
MirLowerError::ConstEvalError(_, e) => *e, _ => ConstEvalError::MirLowerError(value), @@ -71,8 +72,8 @@ impl<'db> From> for ConstEvalError<'db> { } } -impl<'db> From> for ConstEvalError<'db> { - fn from(value: MirEvalError<'db>) -> Self { +impl From for ConstEvalError { + fn from(value: MirEvalError) -> Self { ConstEvalError::MirEvalError(value) } } @@ -85,7 +86,8 @@ pub fn intern_const_ref<'a>( krate: Crate, ) -> Const<'a> { let interner = DbInterner::new_no_crate(db); - let layout = db.layout_of_ty(ty, ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }); + let layout = db + .layout_of_ty(ty.store(), ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }.store()); let kind = match value { LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. @@ -180,10 +182,10 @@ pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option< } } -pub(crate) fn const_eval_discriminant_variant<'db>( - db: &'db dyn HirDatabase, +pub(crate) fn const_eval_discriminant_variant( + db: &dyn HirDatabase, variant_id: EnumVariantId, -) -> Result> { +) -> Result { let interner = DbInterner::new_no_crate(db); let def = variant_id.into(); let body = db.body(def); @@ -206,8 +208,9 @@ pub(crate) fn const_eval_discriminant_variant<'db>( let mir_body = db.monomorphized_mir_body( def, - GenericArgs::new_from_iter(interner, []), - ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) }, + GenericArgs::empty(interner).store(), + ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) } + .store(), )?; let c = interpret_mir(db, mir_body, false, None)?.0?; let c = if is_signed { @@ -233,7 +236,7 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd } if has_closure(ctx.body, expr) { // Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic. 
- return unknown_const(infer[expr]); + return Const::error(ctx.interner()); } if let Expr::Path(p) = &ctx.body[expr] { let mut ctx = TyLoweringContext::new( @@ -252,63 +255,89 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd { return result; } - unknown_const(infer[expr]) + Const::error(ctx.interner()) } -pub(crate) fn const_eval_cycle_result<'db>( - _: &'db dyn HirDatabase, - _: salsa::Id, - _: ConstId, - _: GenericArgs<'db>, - _: Option>, -) -> Result, ConstEvalError<'db>> { - Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) -} - -pub(crate) fn const_eval_static_cycle_result<'db>( - _: &'db dyn HirDatabase, - _: salsa::Id, - _: StaticId, -) -> Result, ConstEvalError<'db>> { - Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) -} - -pub(crate) fn const_eval_discriminant_cycle_result<'db>( - _: &'db dyn HirDatabase, +pub(crate) fn const_eval_discriminant_cycle_result( + _: &dyn HirDatabase, _: salsa::Id, _: EnumVariantId, -) -> Result> { +) -> Result { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } -pub(crate) fn const_eval_query<'db>( +pub(crate) fn const_eval<'db>( db: &'db dyn HirDatabase, def: ConstId, subst: GenericArgs<'db>, trait_env: Option>, -) -> Result, ConstEvalError<'db>> { - let body = db.monomorphized_mir_body( - def.into(), - subst, - ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) }, - )?; - let c = interpret_mir(db, body, false, trait_env)?.0?; - Ok(c) +) -> Result, ConstEvalError> { + return match const_eval_query(db, def, subst.store(), trait_env.map(|env| env.store())) { + Ok(konst) => Ok(konst.as_ref()), + Err(err) => Err(err.clone()), + }; + + #[salsa::tracked(returns(ref), cycle_result = const_eval_cycle_result)] + pub(crate) fn const_eval_query<'db>( + db: &'db dyn HirDatabase, + def: ConstId, + subst: StoredGenericArgs, + trait_env: Option, + ) -> Result { + let body = db.monomorphized_mir_body( + def.into(), + subst, + ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) } + .store(), + )?; + let c = interpret_mir(db, body, false, trait_env.as_ref().map(|env| env.as_ref()))?.0?; + Ok(c.store()) + } + + pub(crate) fn const_eval_cycle_result( + _: &dyn HirDatabase, + _: salsa::Id, + _: ConstId, + _: StoredGenericArgs, + _: Option, + ) -> Result { + Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) + } } -pub(crate) fn const_eval_static_query<'db>( +pub(crate) fn const_eval_static<'db>( db: &'db dyn HirDatabase, def: StaticId, -) -> Result, ConstEvalError<'db>> { - let interner = DbInterner::new_no_crate(db); - let body = db.monomorphized_mir_body( - def.into(), - GenericArgs::new_from_iter(interner, []), - ParamEnvAndCrate { - param_env: db.trait_environment_for_body(def.into()), - krate: def.krate(db), - }, - )?; - let c = interpret_mir(db, body, false, None)?.0?; - Ok(c) +) -> Result, ConstEvalError> { + return match const_eval_static_query(db, def) { + Ok(konst) => Ok(konst.as_ref()), + Err(err) => Err(err.clone()), + }; + + #[salsa::tracked(returns(ref), cycle_result = const_eval_static_cycle_result)] + pub(crate) fn const_eval_static_query<'db>( + db: &'db dyn HirDatabase, + def: StaticId, + ) -> Result { + let interner = DbInterner::new_no_crate(db); + let body = db.monomorphized_mir_body( + def.into(), + GenericArgs::empty(interner).store(), + ParamEnvAndCrate { + param_env: db.trait_environment_for_body(def.into()), + krate: def.krate(db), + } + .store(), + )?; + let c = interpret_mir(db, body, false, None)?.0?; + 
Ok(c.store()) + } + + pub(crate) fn const_eval_static_cycle_result( + _: &dyn HirDatabase, + _: salsa::Id, + _: StaticId, + ) -> Result { + Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) + } } diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 2dc937d76031..8816e13ba7b6 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -27,7 +27,7 @@ use super::{ mod intrinsics; -fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> { +fn simplify(e: ConstEvalError) -> ConstEvalError { match e { ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => { simplify(ConstEvalError::MirEvalError(*e)) @@ -39,7 +39,7 @@ fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> { #[track_caller] fn check_fail( #[rust_analyzer::rust_fixture] ra_fixture: &str, - error: impl FnOnce(ConstEvalError<'_>) -> bool, + error: impl FnOnce(ConstEvalError) -> bool, ) { let (db, file_id) = TestDB::with_single_file(ra_fixture); crate::attach_db(&db, || match eval_goal(&db, file_id) { @@ -104,7 +104,7 @@ fn check_answer( }); } -fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String { +fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String { let mut err = String::new(); let span_formatter = |file, range| format!("{file:?} {range:?}"); let display_target = @@ -121,7 +121,7 @@ fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String { err } -fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result, ConstEvalError<'_>> { +fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result, ConstEvalError> { let _tracing = setup_tracing(); let interner = DbInterner::new_no_crate(db); let module_id = db.module_for_file(file_id.file_id(db)); @@ -142,7 +142,7 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result, ConstEv _ => None, }) .expect("No const named GOAL found in the test"); - db.const_eval(const_id, GenericArgs::new_from_iter(interner, []), None) + db.const_eval(const_id, GenericArgs::empty(interner), None) } #[test] diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 47bb6675ea48..f0f65eedbce9 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -19,9 +19,10 @@ use crate::{ lower::{Diagnostics, GenericDefaults}, mir::{BorrowckResult, MirBody, MirLowerError}, next_solver::{ - Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, TraitRef, Ty, VariancesOf, + Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, StoredEarlyBinder, StoredGenericArgs, + StoredTy, TraitRef, Ty, VariancesOf, }, - traits::ParamEnvAndCrate, + traits::{ParamEnvAndCrate, StoredParamEnvAndCrate}, }; #[query_group::query_group] @@ -32,60 +33,48 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { // and `monomorphized_mir_body_for_closure` into `monomorphized_mir_body` #[salsa::invoke(crate::mir::mir_body_query)] #[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)] - fn mir_body<'db>( - &'db self, - def: DefWithBodyId, - ) -> Result>, MirLowerError<'db>>; + fn mir_body(&self, def: DefWithBodyId) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::mir_body_for_closure_query)] - fn mir_body_for_closure<'db>( - &'db self, - def: InternedClosureId, - ) -> Result>, MirLowerError<'db>>; + fn mir_body_for_closure(&self, def: InternedClosureId) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::monomorphized_mir_body_query)] #[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)] - fn monomorphized_mir_body<'db>( - &'db self, + fn 
monomorphized_mir_body( + &self, def: DefWithBodyId, - subst: GenericArgs<'db>, - env: ParamEnvAndCrate<'db>, - ) -> Result>, MirLowerError<'db>>; + subst: StoredGenericArgs, + env: StoredParamEnvAndCrate, + ) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)] - fn monomorphized_mir_body_for_closure<'db>( - &'db self, + fn monomorphized_mir_body_for_closure( + &self, def: InternedClosureId, - subst: GenericArgs<'db>, - env: ParamEnvAndCrate<'db>, - ) -> Result>, MirLowerError<'db>>; + subst: StoredGenericArgs, + env: StoredParamEnvAndCrate, + ) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::borrowck_query)] #[salsa::lru(2024)] - fn borrowck<'db>( - &'db self, - def: DefWithBodyId, - ) -> Result]>, MirLowerError<'db>>; + fn borrowck(&self, def: DefWithBodyId) -> Result, MirLowerError>; - #[salsa::invoke(crate::consteval::const_eval_query)] - #[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)] + #[salsa::invoke(crate::consteval::const_eval)] + #[salsa::transparent] fn const_eval<'db>( &'db self, def: ConstId, subst: GenericArgs<'db>, trait_env: Option>, - ) -> Result, ConstEvalError<'db>>; + ) -> Result, ConstEvalError>; - #[salsa::invoke(crate::consteval::const_eval_static_query)] - #[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)] - fn const_eval_static<'db>(&'db self, def: StaticId) -> Result, ConstEvalError<'db>>; + #[salsa::invoke(crate::consteval::const_eval_static)] + #[salsa::transparent] + fn const_eval_static<'db>(&'db self, def: StaticId) -> Result, ConstEvalError>; #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)] #[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)] - fn const_eval_discriminant<'db>( - &'db self, - def: EnumVariantId, - ) -> Result>; + fn const_eval_discriminant(&self, def: EnumVariantId) -> Result; #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] #[salsa::transparent] @@ -100,19 +89,19 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(crate::layout::layout_of_adt_query)] #[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)] - fn layout_of_adt<'db>( - &'db self, + fn layout_of_adt( + &self, def: AdtId, - args: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, + args: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError>; #[salsa::invoke(crate::layout::layout_of_ty_query)] #[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)] - fn layout_of_ty<'db>( - &'db self, - ty: Ty<'db>, - env: ParamEnvAndCrate<'db>, + fn layout_of_ty( + &self, + ty: StoredTy, + env: StoredParamEnvAndCrate, ) -> Result, LayoutError>; #[salsa::invoke(crate::layout::target_data_layout_query)] @@ -125,8 +114,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn ty<'db>(&'db self, def: TyDefId) -> EarlyBinder<'db, Ty<'db>>; - #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics)] + #[salsa::transparent] fn type_for_type_alias_with_diagnostics<'db>( &'db self, def: TypeAliasId, @@ -134,11 +123,12 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// a `StructId` or `EnumVariantId` with a record constructor. 
- #[salsa::invoke(crate::lower::value_ty_query)] + #[salsa::invoke(crate::lower::value_ty)] + #[salsa::transparent] fn value_ty<'db>(&'db self, def: ValueTyDefId) -> Option>>; - #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics)] + #[salsa::transparent] fn impl_self_ty_with_diagnostics<'db>( &'db self, def: ImplId, @@ -148,9 +138,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn impl_self_ty<'db>(&'db self, def: ImplId) -> EarlyBinder<'db, Ty<'db>>; - // FIXME: Make this a non-interned query. - #[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::const_param_ty_with_diagnostics)] + #[salsa::transparent] fn const_param_ty_with_diagnostics<'db>(&'db self, def: ConstParamId) -> (Ty<'db>, Diagnostics); @@ -158,7 +147,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> Ty<'db>; - #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)] + #[salsa::invoke(crate::lower::impl_trait_with_diagnostics)] + #[salsa::transparent] fn impl_trait_with_diagnostics<'db>( &'db self, def: ImplId, @@ -169,19 +159,18 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn impl_trait<'db>(&'db self, def: ImplId) -> Option>>; #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)] - fn field_types_with_diagnostics<'db>( - &'db self, + #[salsa::transparent] + fn field_types_with_diagnostics( + &self, var: VariantId, - ) -> (Arc>>>, Diagnostics); + ) -> &(ArenaMap>, Diagnostics); #[salsa::invoke(crate::lower::field_types_query)] #[salsa::transparent] - fn field_types<'db>( - &'db self, - var: VariantId, - ) -> Arc>>>; + fn field_types(&self, var: VariantId) -> &ArenaMap>; - #[salsa::invoke(crate::lower::callable_item_signature_query)] + #[salsa::invoke(crate::lower::callable_item_signature)] + #[salsa::transparent] fn callable_item_signature<'db>( &'db self, def: CallableDefId, @@ -191,26 +180,27 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId) -> ParamEnv<'db>; - #[salsa::invoke(crate::lower::trait_environment_query)] + #[salsa::invoke(crate::lower::trait_environment)] + #[salsa::transparent] fn trait_environment<'db>(&'db self, def: GenericDefId) -> ParamEnv<'db>; #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)] #[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)] - fn generic_defaults_with_diagnostics<'db>( - &'db self, + fn generic_defaults_with_diagnostics( + &self, def: GenericDefId, - ) -> (GenericDefaults<'db>, Diagnostics); + ) -> (GenericDefaults, Diagnostics); /// This returns an empty list if no parameter has default. /// /// The binders of the returned defaults are only up to (not including) this parameter. 
#[salsa::invoke(crate::lower::generic_defaults_query)] #[salsa::transparent] - fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>; + fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults; // Interned IDs for solver integration #[salsa::interned] - fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId; + fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId; #[salsa::interned] fn intern_closure(&self, id: InternedClosure) -> InternedClosureId; @@ -219,11 +209,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; #[salsa::invoke(crate::variance::variances_of)] - #[salsa::cycle( - // cycle_fn = crate::variance::variances_of_cycle_fn, - // cycle_initial = crate::variance::variances_of_cycle_initial, - cycle_result = crate::variance::variances_of_cycle_initial, - )] + #[salsa::transparent] fn variances_of<'db>(&'db self, def: GenericDefId) -> VariancesOf<'db>; } @@ -245,10 +231,10 @@ pub struct InternedConstParamId { pub loc: ConstParamId, } -#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX, unsafe(non_update_types))] +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[derive(PartialOrd, Ord)] pub struct InternedOpaqueTyId { - pub loc: ImplTraitId<'db>, + pub loc: ImplTraitId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 0de7fab8d1b2..dd1fc3b36ef8 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -99,7 +99,7 @@ impl BodyValidationDiagnostic { struct ExprValidator<'db> { owner: DefWithBodyId, body: Arc, - infer: &'db InferenceResult<'db>, + infer: &'db InferenceResult, env: ParamEnv<'db>, diagnostics: Vec, validate_lints: bool, @@ -313,7 +313,7 @@ impl<'db> ExprValidator<'db> { ); value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_))) } - Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind() { + Expr::Field { expr, .. } => match self.infer.expr_ty(*expr).kind() { TyKind::Adt(adt, ..) 
if matches!(adt.def_id().0, AdtId::UnionId(_)) => false, _ => self.is_known_valid_scrutinee(*expr), }, @@ -554,7 +554,7 @@ impl<'db> FilterMapNextChecker<'db> { pub fn record_literal_missing_fields( db: &dyn HirDatabase, - infer: &InferenceResult<'_>, + infer: &InferenceResult, id: ExprId, expr: &Expr, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { @@ -584,7 +584,7 @@ pub fn record_literal_missing_fields( pub fn record_pattern_missing_fields( db: &dyn HirDatabase, - infer: &InferenceResult<'_>, + infer: &InferenceResult, id: PatId, pat: &Pat, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { @@ -612,8 +612,8 @@ pub fn record_pattern_missing_fields( Some((variant_def, missed_fields, exhaustive)) } -fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult<'_>) -> bool { - fn walk(pat: PatId, body: &Body, infer: &InferenceResult<'_>, has_type_mismatches: &mut bool) { +fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool { + fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) { match infer.type_mismatch_for_pat(pat) { Some(_) => *has_type_mismatches = true, None if *has_type_mismatches => (), diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs index 80b65ace77cd..8e6101e6a0e3 100644 --- a/crates/hir-ty/src/diagnostics/match_check.rs +++ b/crates/hir-ty/src/diagnostics/match_check.rs @@ -16,7 +16,7 @@ use hir_def::{ item_tree::FieldsShape, }; use hir_expand::name::Name; -use rustc_type_ir::inherent::{IntoKind, SliceLike}; +use rustc_type_ir::inherent::IntoKind; use span::Edition; use stdx::{always, never, variance::PhantomCovariantLifetime}; @@ -96,7 +96,7 @@ pub(crate) enum PatKind<'db> { pub(crate) struct PatCtxt<'a, 'db> { db: &'db dyn HirDatabase, - infer: &'a InferenceResult<'db>, + infer: &'db InferenceResult, body: &'a Body, pub(crate) errors: Vec, } @@ -104,7 +104,7 @@ pub(crate) struct PatCtxt<'a, 'db> { impl<'a, 'db> PatCtxt<'a, 'db> { pub(crate) fn new( db: &'db dyn HirDatabase, - infer: &'a InferenceResult<'db>, + infer: &'db InferenceResult, body: &'a Body, ) -> Self { Self { db, infer, body, errors: Vec::new() } @@ -119,12 +119,15 @@ impl<'a, 'db> PatCtxt<'a, 'db> { let unadjusted_pat = self.lower_pattern_unadjusted(pat); self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold( unadjusted_pat, - |subpattern, ref_ty| Pat { ty: *ref_ty, kind: Box::new(PatKind::Deref { subpattern }) }, + |subpattern, ref_ty| Pat { + ty: ref_ty.as_ref(), + kind: Box::new(PatKind::Deref { subpattern }), + }, ) } fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat<'db> { - let mut ty = self.infer[pat]; + let mut ty = self.infer.pat_ty(pat); let variant = self.infer.variant_resolution_for_pat(pat); let kind = match self.body[pat] { @@ -151,7 +154,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> { hir_def::hir::Pat::Bind { id, subpat, .. 
} => { let bm = self.infer.binding_modes[pat]; - ty = self.infer[id]; + ty = self.infer.binding_ty(id); let name = &self.body[id].name; match (bm, ty.kind()) { (BindingMode::Ref(_), TyKind::Ref(_, rty, _)) => ty = rty, @@ -273,7 +276,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> { } fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat<'db> { - let ty = self.infer[pat]; + let ty = self.infer.pat_ty(pat); let pat_from_kind = |kind| Pat { ty, kind: Box::new(kind) }; diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 91448d5806f9..eda7e7e249b3 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -11,7 +11,7 @@ use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness}, }; -use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike}; +use rustc_type_ir::inherent::{AdtDef, IntoKind}; use smallvec::{SmallVec, smallvec}; use stdx::never; @@ -150,7 +150,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> { let fields_len = variant.fields(self.db).fields().len() as u32; (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| { - let ty = field_tys[fid].instantiate(self.infcx.interner, substs); + let ty = field_tys[fid].get().instantiate(self.infcx.interner, substs); let ty = self .infcx .at(&ObligationCause::dummy(), self.env) diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs index bbc381ba5d51..50d4517d0125 100644 --- a/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -97,9 +97,9 @@ enum UnsafeDiagnostic { DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock }, } -pub fn unsafe_operations_for_body<'db>( - db: &'db dyn HirDatabase, - infer: &InferenceResult<'db>, +pub fn unsafe_operations_for_body( + db: &dyn HirDatabase, + infer: &InferenceResult, def: DefWithBodyId, body: &Body, callback: &mut dyn FnMut(ExprOrPatId), @@ -116,9 +116,9 @@ pub fn unsafe_operations_for_body<'db>( } } -pub fn unsafe_operations<'db>( - db: &'db dyn HirDatabase, - infer: &InferenceResult<'db>, +pub fn unsafe_operations( + db: &dyn HirDatabase, + infer: &InferenceResult, def: DefWithBodyId, body: &Body, current: ExprId, @@ -136,7 +136,7 @@ pub fn unsafe_operations<'db>( struct UnsafeVisitor<'db> { db: &'db dyn HirDatabase, - infer: &'db InferenceResult<'db>, + infer: &'db InferenceResult, body: &'db Body, resolver: Resolver<'db>, def: DefWithBodyId, @@ -155,7 +155,7 @@ struct UnsafeVisitor<'db> { impl<'db> UnsafeVisitor<'db> { fn new( db: &'db dyn HirDatabase, - infer: &'db InferenceResult<'db>, + infer: &'db InferenceResult, body: &'db Body, def: DefWithBodyId, unsafe_expr_cb: &'db mut dyn FnMut(UnsafeDiagnostic), @@ -260,7 +260,7 @@ impl<'db> UnsafeVisitor<'db> { match pat { Pat::Record { .. } => { - if let Some((AdtId::UnionId(_), _)) = self.infer[current].as_adt() { + if let Some((AdtId::UnionId(_), _)) = self.infer.pat_ty(current).as_adt() { let old_inside_union_destructure = mem::replace(&mut self.inside_union_destructure, true); self.body.walk_pats_shallow(current, |pat| self.walk_pat(pat)); @@ -286,7 +286,7 @@ impl<'db> UnsafeVisitor<'db> { let inside_assignment = mem::replace(&mut self.inside_assignment, false); match expr { &Expr::Call { callee, .. 
} => { - let callee = self.infer[callee]; + let callee = self.infer.expr_ty(callee); if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(func)), _) = callee.kind() { @@ -341,7 +341,7 @@ impl<'db> UnsafeVisitor<'db> { } } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - if let TyKind::RawPtr(..) = self.infer[*expr].kind() { + if let TyKind::RawPtr(..) = self.infer.expr_ty(*expr).kind() { self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref); } } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index e1d62a9c7a3e..8e44c14abe23 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -38,7 +38,7 @@ use rustc_hash::FxHashSet; use rustc_type_ir::{ AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, CoroutineClosureArgsParts, RegionKind, Upcast, - inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _}, + inherent::{AdtDef, GenericArgs as _, IntoKind, Term as _, Ty as _, Tys as _}, }; use smallvec::SmallVec; use span::Edition; @@ -52,9 +52,9 @@ use crate::{ lower::GenericPredicates, mir::pad16, next_solver::{ - AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder, - ExistentialPredicate, FnSig, GenericArg, GenericArgs, ParamEnv, PolyFnSig, Region, - SolverDefId, Term, TraitRef, Ty, TyKind, TypingMode, + AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, ExistentialPredicate, FnSig, + GenericArg, GenericArgKind, GenericArgs, ParamEnv, PolyFnSig, Region, SolverDefId, + StoredEarlyBinder, StoredTy, Term, TermKind, TraitRef, Ty, TyKind, TypingMode, abi::Safety, infer::{DbInternerInferExt, traits::ObligationCause}, }, @@ -602,7 +602,7 @@ impl<'db, T: HirDisplay<'db>> HirDisplay<'db> for &T { impl<'db, T: HirDisplay<'db> + Internable> HirDisplay<'db> for Interned { fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result { - HirDisplay::hir_fmt(self.as_ref(), f) + HirDisplay::hir_fmt(&**self, f) } } @@ -664,10 +664,10 @@ fn write_projection<'db>(f: &mut HirFormatter<'_, 'db>, alias: &AliasTy<'db>) -> impl<'db> HirDisplay<'db> for GenericArg<'db> { fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result { - match self { - GenericArg::Ty(ty) => ty.hir_fmt(f), - GenericArg::Lifetime(lt) => lt.hir_fmt(f), - GenericArg::Const(c) => c.hir_fmt(f), + match self.kind() { + GenericArgKind::Type(ty) => ty.hir_fmt(f), + GenericArgKind::Lifetime(lt) => lt.hir_fmt(f), + GenericArgKind::Const(c) => c.hir_fmt(f), } } } @@ -790,7 +790,7 @@ fn render_const_scalar_inner<'db>( TyKind::Slice(ty) => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; let size_one = layout.size.bytes_usize(); @@ -824,7 +824,7 @@ fn render_const_scalar_inner<'db>( let Ok(t) = memory_map.vtable_ty(ty_id) else { return f.write_str(""); }; - let Ok(layout) = f.db.layout_of_ty(t, param_env) else { + let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -854,7 +854,7 @@ fn render_const_scalar_inner<'db>( return f.write_str(""); } }); - let Ok(layout) = f.db.layout_of_ty(t, param_env) else { + let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -866,7 +866,7 @@ fn render_const_scalar_inner<'db>( 
} }, TyKind::Tuple(tys) => { - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; f.write_str("(")?; @@ -878,7 +878,7 @@ fn render_const_scalar_inner<'db>( f.write_str(", ")?; } let offset = layout.fields.offset(id).bytes_usize(); - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { f.write_str("")?; continue; }; @@ -889,7 +889,7 @@ fn render_const_scalar_inner<'db>( } TyKind::Adt(def, args) => { let def = def.def_id().0; - let Ok(layout) = f.db.layout_of_adt(def, args, param_env) else { + let Ok(layout) = f.db.layout_of_adt(def, args.store(), param_env.store()) else { return f.write_str(""); }; match def { @@ -900,7 +900,7 @@ fn render_const_scalar_inner<'db>( render_variant_after_name( s.fields(f.db), f, - &field_types, + field_types, f.db.trait_environment(def.into()), &layout, args, @@ -932,7 +932,7 @@ fn render_const_scalar_inner<'db>( render_variant_after_name( var_id.fields(f.db), f, - &field_types, + field_types, f.db.trait_environment(def.into()), var_layout, args, @@ -952,7 +952,7 @@ fn render_const_scalar_inner<'db>( let Some(len) = consteval::try_const_usize(f.db, len) else { return f.write_str(""); }; - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; let size_one = layout.size.bytes_usize(); @@ -992,7 +992,7 @@ fn render_const_scalar_inner<'db>( fn render_variant_after_name<'db>( data: &VariantFields, f: &mut HirFormatter<'_, 'db>, - field_types: &ArenaMap>>, + field_types: &'db ArenaMap>, param_env: ParamEnv<'db>, layout: &Layout, args: GenericArgs<'db>, @@ -1004,8 +1004,8 @@ fn render_variant_after_name<'db>( FieldsShape::Record | FieldsShape::Tuple => { let render_field = |f: &mut HirFormatter<'_, 'db>, id: LocalFieldId| { let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize(); - let ty = field_types[id].instantiate(f.interner, args); - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let ty = field_types[id].get().instantiate(f.interner, args); + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -1223,7 +1223,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> { }; f.end_location_link(); - if args.len() > 0 { + if !args.is_empty() { let generic_def_id = GenericDefId::from_callable(db, def); let generics = generics(db, generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = @@ -1787,9 +1787,9 @@ impl<'db> HirDisplay<'db> for PolyFnSig<'db> { impl<'db> HirDisplay<'db> for Term<'db> { fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result { - match self { - Term::Ty(it) => it.hir_fmt(f), - Term::Const(it) => it.hir_fmt(f), + match self.kind() { + TermKind::Ty(it) => it.hir_fmt(f), + TermKind::Const(it) => it.hir_fmt(f), } } } diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs index 3ae6451d6952..66692143bc1a 100644 --- a/crates/hir-ty/src/drop.rs +++ b/crates/hir-ty/src/drop.rs @@ -2,7 +2,7 @@ use hir_def::{AdtId, signatures::StructFlags}; use rustc_hash::FxHashSet; -use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike}; +use rustc_type_ir::inherent::{AdtDef, IntoKind}; use stdx::never; use crate::{ @@ -85,7 +85,7 @@ fn has_drop_glue_impl<'db>( .map(|(_, field_ty)| { has_drop_glue_impl( infcx, - 
field_ty.instantiate(infcx.interner, subst), + field_ty.get().instantiate(infcx.interner, subst), env, visited, ) @@ -105,7 +105,7 @@ fn has_drop_glue_impl<'db>( .map(|(_, field_ty)| { has_drop_glue_impl( infcx, - field_ty.instantiate(infcx.interner, subst), + field_ty.get().instantiate(infcx.interner, subst), env, visited, ) diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs index 64b15eb017a6..ffdc9ca0f85a 100644 --- a/crates/hir-ty/src/dyn_compatibility.rs +++ b/crates/hir-ty/src/dyn_compatibility.rs @@ -10,8 +10,7 @@ use hir_def::{ use rustc_hash::FxHashSet; use rustc_type_ir::{ AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _, - Upcast, elaborate, - inherent::{IntoKind, SliceLike}, + Upcast, elaborate, inherent::IntoKind, }; use smallvec::SmallVec; diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index d6682c0cf2fe..bd5fffc4cc65 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -28,7 +28,7 @@ mod path; mod place_op; pub(crate) mod unify; -use std::{cell::OnceCell, convert::identity, iter, ops::Index}; +use std::{cell::OnceCell, convert::identity, iter}; use base_db::Crate; use either::Either; @@ -47,14 +47,12 @@ use hir_expand::{mod_path::ModPath, name::Name}; use indexmap::IndexSet; use intern::sym; use la_arena::ArenaMap; -use macros::{TypeFoldable, TypeVisitable}; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ AliasTyKind, TypeFoldable, - inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _}, + inherent::{AdtDef, IntoKind, Region as _, Ty as _}, }; -use salsa::Update; use span::Edition; use stdx::never; use thin_vec::ThinVec; @@ -74,8 +72,8 @@ use crate::{ method_resolution::{CandidateId, MethodResolutionUnstableFeatures}, mir::MirSpan, next_solver::{ - AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind, - Tys, + AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, + StoredGenericArgs, StoredTy, StoredTys, Ty, TyKind, Tys, abi::Safety, infer::{InferCtxt, traits::ObligationCause}, }, @@ -95,7 +93,7 @@ use cast::{CastCheck, CastError}; pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; /// The entry point of type inference. 
-fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> { +fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult { let _p = tracing::info_span!("infer_query").entered(); let resolver = def.resolver(db); let body = db.body(def); @@ -162,7 +160,7 @@ fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> ctx.resolve_all() } -fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult<'_> { +fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult { InferenceResult { has_errors: true, ..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)) @@ -196,8 +194,8 @@ pub enum InferenceTyDiagnosticSource { Signature, } -#[derive(Debug, PartialEq, Eq, Clone, Update)] -pub enum InferenceDiagnostic<'db> { +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum InferenceDiagnostic { NoSuchField { field: ExprOrPatId, private: Option, @@ -213,16 +211,16 @@ pub enum InferenceDiagnostic<'db> { }, UnresolvedField { expr: ExprId, - receiver: Ty<'db>, + receiver: StoredTy, name: Name, method_with_same_name_exists: bool, }, UnresolvedMethodCall { expr: ExprId, - receiver: Ty<'db>, + receiver: StoredTy, name: Name, /// Contains the type the field resolves to - field_with_same_name: Option>, + field_with_same_name: Option, assoc_func_with_same_name: Option, }, UnresolvedAssocItem { @@ -249,21 +247,21 @@ pub enum InferenceDiagnostic<'db> { }, ExpectedFunction { call_expr: ExprId, - found: Ty<'db>, + found: StoredTy, }, TypedHole { expr: ExprId, - expected: Ty<'db>, + expected: StoredTy, }, CastToUnsized { expr: ExprId, - cast_ty: Ty<'db>, + cast_ty: StoredTy, }, InvalidCast { expr: ExprId, error: CastError, - expr_ty: Ty<'db>, - cast_ty: Ty<'db>, + expr_ty: StoredTy, + cast_ty: StoredTy, }, TyDiagnostic { source: InferenceTyDiagnosticSource, @@ -290,10 +288,10 @@ pub enum InferenceDiagnostic<'db> { } /// A mismatch between an expected and an inferred type. -#[derive(Clone, PartialEq, Eq, Debug, Hash, Update)] -pub struct TypeMismatch<'db> { - pub expected: Ty<'db>, - pub actual: Ty<'db>, +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypeMismatch { + pub expected: StoredTy, + pub actual: StoredTy, } /// Represents coercing a value to a different type of value. @@ -336,20 +334,23 @@ pub struct TypeMismatch<'db> { /// At some point, of course, `Box` should move out of the compiler, in which /// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> -> /// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`. 
-#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, Update)] -pub struct Adjustment<'db> { - #[type_visitable(ignore)] - #[type_foldable(identity)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct Adjustment { pub kind: Adjust, - pub target: Ty<'db>, + pub target: StoredTy, } -impl<'db> Adjustment<'db> { - pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self { +impl Adjustment { + pub fn borrow<'db>( + interner: DbInterner<'db>, + m: Mutability, + ty: Ty<'db>, + lt: Region<'db>, + ) -> Self { let ty = Ty::new_ref(interner, lt, ty, m); Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))), - target: ty, + target: ty.store(), } } } @@ -473,56 +474,47 @@ pub enum PointerCast { /// When you add a field that stores types (including `Substitution` and the like), don't forget /// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must /// not appear in the final inference result. -#[derive(Clone, PartialEq, Eq, Debug, Update)] -pub struct InferenceResult<'db> { +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct InferenceResult { /// For each method call expr, records the function it resolves to. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))] - method_resolutions: FxHashMap)>, + method_resolutions: FxHashMap, /// For each field access expr, records the field it resolves to. field_resolutions: FxHashMap>, /// For each struct literal or pattern, records the variant it resolves to. variant_resolutions: FxHashMap, /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap)>, + assoc_resolutions: FxHashMap, /// Whenever a tuple field expression access a tuple field, we allocate a tuple id in /// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of /// that which allows us to resolve a [`TupleFieldId`]s type. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))] - tuple_field_access_types: ThinVec>, + tuple_field_access_types: ThinVec, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))] - pub(crate) type_of_expr: ArenaMap>, + pub(crate) type_of_expr: ArenaMap, /// For each pattern record the type it resolves to. /// /// **Note**: When a pattern type is resolved it may still contain /// unresolved or missing subpatterns or subpatterns of mismatched types. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))] - pub(crate) type_of_pat: ArenaMap>, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* binding id is technically update */)))] - pub(crate) type_of_binding: ArenaMap>, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* type ref id is technically update */)))] - pub(crate) type_of_type_placeholder: FxHashMap>, - pub(crate) type_of_opaque: FxHashMap>, - - pub(crate) type_mismatches: Option>>>, + pub(crate) type_of_pat: ArenaMap, + pub(crate) type_of_binding: ArenaMap, + pub(crate) type_of_type_placeholder: FxHashMap, + pub(crate) type_of_opaque: FxHashMap, + + pub(crate) type_mismatches: Option>>, /// Whether there are any type-mismatching errors in the result. // FIXME: This isn't as useful as initially thought due to us falling back placeholders to // `TyKind::Error`. // Which will then mark this field. 
pub(crate) has_errors: bool, /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))] - diagnostics: ThinVec>, + diagnostics: ThinVec, /// Interned `Error` type to return references to. // FIXME: Remove this. - error_ty: Ty<'db>, + error_ty: StoredTy, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))] - pub(crate) expr_adjustments: FxHashMap]>>, + pub(crate) expr_adjustments: FxHashMap>, /// Stores the types which were implicitly dereferenced in pattern binding modes. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))] - pub(crate) pat_adjustments: FxHashMap>>, + pub(crate) pat_adjustments: FxHashMap>, /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings. /// /// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an @@ -538,7 +530,7 @@ pub struct InferenceResult<'db> { /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`. pub(crate) binding_modes: ArenaMap, - pub(crate) closure_info: FxHashMap>, FnTrait)>, + pub(crate) closure_info: FxHashMap, FnTrait)>, // FIXME: remove this field pub mutated_bindings_in_closure: FxHashSet, @@ -546,15 +538,15 @@ pub struct InferenceResult<'db> { } #[salsa::tracked] -impl<'db> InferenceResult<'db> { - #[salsa::tracked(returns(ref), cycle_result = infer_cycle_result, unsafe(non_update_types))] - pub fn for_body(db: &'db dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'db> { +impl InferenceResult { + #[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)] + pub fn for_body(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult { infer_query(db, def) } } -impl<'db> InferenceResult<'db> { - fn new(error_ty: Ty<'db>) -> Self { +impl InferenceResult { + fn new(error_ty: Ty<'_>) -> Self { Self { method_resolutions: Default::default(), field_resolutions: Default::default(), @@ -569,7 +561,7 @@ impl<'db> InferenceResult<'db> { type_of_opaque: Default::default(), type_mismatches: Default::default(), has_errors: Default::default(), - error_ty, + error_ty: error_ty.store(), pat_adjustments: Default::default(), binding_modes: Default::default(), expr_adjustments: Default::default(), @@ -579,8 +571,8 @@ impl<'db> InferenceResult<'db> { } } - pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> { - self.method_resolutions.get(&expr).copied() + pub fn method_resolution<'db>(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> { + self.method_resolutions.get(&expr).map(|(func, args)| (*func, args.as_ref())) } pub fn field_resolution(&self, expr: ExprId) -> Option> { self.field_resolutions.get(&expr).copied() @@ -597,16 +589,19 @@ impl<'db> InferenceResult<'db> { ExprOrPatId::PatId(id) => self.variant_resolution_for_pat(id), } } - pub fn assoc_resolutions_for_expr( + pub fn assoc_resolutions_for_expr<'db>( &self, id: ExprId, ) -> Option<(CandidateId, GenericArgs<'db>)> { - self.assoc_resolutions.get(&id.into()).copied() + self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref())) } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_pat<'db>( + &self, + id: PatId, + ) -> Option<(CandidateId, GenericArgs<'db>)> { + 
self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref())) } - pub fn assoc_resolutions_for_expr_or_pat( + pub fn assoc_resolutions_for_expr_or_pat<'db>( &self, id: ExprOrPatId, ) -> Option<(CandidateId, GenericArgs<'db>)> { @@ -615,20 +610,20 @@ impl<'db> InferenceResult<'db> { ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id), } } - pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> { + pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { self.type_mismatches.as_deref()?.get(&expr.into()) } - pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> { + pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> { self.type_mismatches.as_deref()?.get(&pat.into()) } - pub fn type_mismatches(&self) -> impl Iterator)> { + pub fn type_mismatches(&self) -> impl Iterator { self.type_mismatches .as_deref() .into_iter() .flatten() .map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch)) } - pub fn expr_type_mismatches(&self) -> impl Iterator)> { + pub fn expr_type_mismatches(&self) -> impl Iterator { self.type_mismatches.as_deref().into_iter().flatten().filter_map( |(expr_or_pat, mismatch)| match *expr_or_pat { ExprOrPatId::ExprId(expr) => Some((expr, mismatch)), @@ -636,22 +631,22 @@ impl<'db> InferenceResult<'db> { }, ) } - pub fn placeholder_types(&self) -> impl Iterator)> { - self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty)) + pub fn placeholder_types<'db>(&self) -> impl Iterator)> { + self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty.as_ref())) } - pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option> { - self.type_of_type_placeholder.get(&type_ref).copied() + pub fn type_of_type_placeholder<'db>(&self, type_ref: TypeRefId) -> Option> { + self.type_of_type_placeholder.get(&type_ref).map(|ty| ty.as_ref()) } - pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec>, FnTrait) { + pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec, FnTrait) { self.closure_info.get(&closure).unwrap() } - pub fn type_of_expr_or_pat(&self, id: ExprOrPatId) -> Option> { + pub fn type_of_expr_or_pat<'db>(&self, id: ExprOrPatId) -> Option> { match id { - ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).copied(), - ExprOrPatId::PatId(id) => self.type_of_pat.get(id).copied(), + ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).map(|it| it.as_ref()), + ExprOrPatId::PatId(id) => self.type_of_pat.get(id).map(|it| it.as_ref()), } } - pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option> { + pub fn type_of_expr_with_adjust<'db>(&self, id: ExprId) -> Option> { match self.expr_adjustments.get(&id).and_then(|adjustments| { adjustments.iter().rfind(|adj| { // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140 @@ -660,37 +655,37 @@ impl<'db> InferenceResult<'db> { Adjustment { kind: Adjust::NeverToAny, target, - } if target.is_never() + } if target.as_ref().is_never() ) }) }) { - Some(adjustment) => Some(adjustment.target), - None => self.type_of_expr.get(id).copied(), + Some(adjustment) => Some(adjustment.target.as_ref()), + None => self.type_of_expr.get(id).map(|it| it.as_ref()), } } - pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option> { + pub fn type_of_pat_with_adjust<'db>(&self, id: PatId) -> Option> { match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) { - Some(adjusted) => 
Some(*adjusted), - None => self.type_of_pat.get(id).copied(), + Some(adjusted) => Some(adjusted.as_ref()), + None => self.type_of_pat.get(id).map(|it| it.as_ref()), } } pub fn is_erroneous(&self) -> bool { self.has_errors && self.type_of_expr.iter().count() == 0 } - pub fn diagnostics(&self) -> &[InferenceDiagnostic<'db>] { + pub fn diagnostics(&self) -> &[InferenceDiagnostic] { &self.diagnostics } - pub fn tuple_field_access_type(&self, id: TupleId) -> Tys<'db> { - self.tuple_field_access_types[id.0 as usize] + pub fn tuple_field_access_type<'db>(&self, id: TupleId) -> Tys<'db> { + self.tuple_field_access_types[id.0 as usize].as_ref() } - pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> { + pub fn pat_adjustment(&self, id: PatId) -> Option<&[StoredTy]> { self.pat_adjustments.get(&id).map(|it| &**it) } - pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment<'db>]> { + pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> { self.expr_adjustments.get(&id).map(|it| &**it) } @@ -699,66 +694,47 @@ impl<'db> InferenceResult<'db> { } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn expression_types(&self) -> impl Iterator)> { - self.type_of_expr.iter().map(|(k, v)| (k, *v)) + pub fn expression_types<'db>(&self) -> impl Iterator)> { + self.type_of_expr.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn pattern_types(&self) -> impl Iterator)> { - self.type_of_pat.iter().map(|(k, v)| (k, *v)) + pub fn pattern_types<'db>(&self) -> impl Iterator)> { + self.type_of_pat.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn binding_types(&self) -> impl Iterator)> { - self.type_of_binding.iter().map(|(k, v)| (k, *v)) + pub fn binding_types<'db>(&self) -> impl Iterator)> { + self.type_of_binding.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. 
- pub fn return_position_impl_trait_types( - &self, + pub fn return_position_impl_trait_types<'db>( + &'db self, db: &'db dyn HirDatabase, - ) -> impl Iterator, Ty<'db>)> { - self.type_of_opaque.iter().filter_map(move |(&id, &ty)| { + ) -> impl Iterator)> { + self.type_of_opaque.iter().filter_map(move |(&id, ty)| { let ImplTraitId::ReturnTypeImplTrait(_, rpit_idx) = id.loc(db) else { return None; }; - Some((rpit_idx, ty)) + Some((rpit_idx, ty.as_ref())) }) } -} -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, expr: ExprId) -> &Ty<'db> { - self.type_of_expr.get(expr).unwrap_or(&self.error_ty) + pub fn expr_ty<'db>(&self, id: ExprId) -> Ty<'db> { + self.type_of_expr.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } -} - -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - fn index(&self, pat: PatId) -> &Ty<'db> { - self.type_of_pat.get(pat).unwrap_or(&self.error_ty) + pub fn pat_ty<'db>(&self, id: PatId) -> Ty<'db> { + self.type_of_pat.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } -} - -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - fn index(&self, id: ExprOrPatId) -> &Ty<'db> { - match id { - ExprOrPatId::ExprId(id) => &self[id], - ExprOrPatId::PatId(id) => &self[id], - } + pub fn expr_or_pat_ty<'db>(&self, id: ExprOrPatId) -> Ty<'db> { + self.type_of_expr_or_pat(id).unwrap_or(self.error_ty.as_ref()) } -} -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, b: BindingId) -> &Ty<'db> { - self.type_of_binding.get(b).unwrap_or(&self.error_ty) + pub fn binding_ty<'db>(&self, id: BindingId) -> Ty<'db> { + self.type_of_binding.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } } @@ -826,7 +802,7 @@ impl<'db> InternedStandardTypes<'db> { re_error: Region::error(interner), re_erased: Region::new_erased(interner), - empty_args: GenericArgs::new_from_iter(interner, []), + empty_args: GenericArgs::empty(interner), } } } @@ -848,7 +824,7 @@ pub(crate) struct InferenceContext<'body, 'db> { pub(crate) lang_items: &'db LangItems, /// The traits in scope, disregarding block modules. This is used for caching purposes. traits_in_scope: FxHashSet, - pub(crate) result: InferenceResult<'db>, + pub(crate) result: InferenceResult, tuple_field_accesses_rev: IndexSet, std::hash::BuildHasherDefault>, /// The return type of the function being inferred, the closure or async block if we're @@ -873,7 +849,7 @@ pub(crate) struct InferenceContext<'body, 'db> { deferred_cast_checks: Vec>, // fields related to closure capture - current_captures: Vec>, + current_captures: Vec, /// A stack that has an entry for each projection in the current capture. /// /// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`. @@ -886,7 +862,7 @@ pub(crate) struct InferenceContext<'body, 'db> { closure_dependencies: FxHashMap>, deferred_closures: FxHashMap, Ty<'db>, Vec>, ExprId)>>, - diagnostics: Diagnostics<'db>, + diagnostics: Diagnostics, } #[derive(Clone, Debug)] @@ -1008,7 +984,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { /// Clones `self` and calls `resolve_all()` on it. // FIXME: Remove this. - pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult<'db> { + pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult { let mut ctx = self.clone(); ctx.type_inference_fallback(); @@ -1032,7 +1008,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { // `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. 
If you // used this function for another workaround, mention it here. If you really need this function and believe that // there is no problem in it being `pub(crate)`, remove this comment. - fn resolve_all(self) -> InferenceResult<'db> { + fn resolve_all(self) -> InferenceResult { let InferenceContext { mut table, mut result, tuple_field_accesses_rev, diagnostics, .. } = self; @@ -1066,23 +1042,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } = &mut result; for ty in type_of_expr.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_expr.shrink_to_fit(); for ty in type_of_pat.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_pat.shrink_to_fit(); for ty in type_of_binding.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_binding.shrink_to_fit(); for ty in type_of_type_placeholder.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_type_placeholder.shrink_to_fit(); type_of_opaque.shrink_to_fit(); @@ -1090,8 +1066,8 @@ impl<'body, 'db> InferenceContext<'body, 'db> { if let Some(type_mismatches) = type_mismatches { *has_errors = true; for mismatch in type_mismatches.values_mut() { - mismatch.expected = table.resolve_completely(mismatch.expected); - mismatch.actual = table.resolve_completely(mismatch.actual); + mismatch.expected = table.resolve_completely(mismatch.expected.as_ref()).store(); + mismatch.actual = table.resolve_completely(mismatch.actual.as_ref()).store(); } type_mismatches.shrink_to_fit(); } @@ -1101,23 +1077,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ExpectedFunction { found: ty, .. } | UnresolvedField { receiver: ty, .. } | UnresolvedMethodCall { receiver: ty, .. } => { - *ty = table.resolve_completely(*ty); + *ty = table.resolve_completely(ty.as_ref()).store(); // FIXME: Remove this when we are on par with rustc in terms of inference - if ty.references_non_lt_error() { + if ty.as_ref().references_non_lt_error() { return false; } if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic && let Some(ty) = field_with_same_name { - *ty = table.resolve_completely(*ty); - if ty.references_non_lt_error() { + *ty = table.resolve_completely(ty.as_ref()).store(); + if ty.as_ref().references_non_lt_error() { *field_with_same_name = None; } } } TypedHole { expected: ty, .. 
} => { - *ty = table.resolve_completely(*ty); + *ty = table.resolve_completely(ty.as_ref()).store(); } _ => (), } @@ -1125,30 +1101,33 @@ impl<'body, 'db> InferenceContext<'body, 'db> { }); diagnostics.shrink_to_fit(); for (_, subst) in method_resolutions.values_mut() { - *subst = table.resolve_completely(*subst); - *has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error()); + *subst = table.resolve_completely(subst.as_ref()).store(); + *has_errors = + *has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error()); } method_resolutions.shrink_to_fit(); for (_, subst) in assoc_resolutions.values_mut() { - *subst = table.resolve_completely(*subst); - *has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error()); + *subst = table.resolve_completely(subst.as_ref()).store(); + *has_errors = + *has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error()); } assoc_resolutions.shrink_to_fit(); for adjustment in expr_adjustments.values_mut().flatten() { - adjustment.target = table.resolve_completely(adjustment.target); - *has_errors = *has_errors || adjustment.target.references_non_lt_error(); + adjustment.target = table.resolve_completely(adjustment.target.as_ref()).store(); + *has_errors = *has_errors || adjustment.target.as_ref().references_non_lt_error(); } expr_adjustments.shrink_to_fit(); for adjustment in pat_adjustments.values_mut().flatten() { - *adjustment = table.resolve_completely(*adjustment); - *has_errors = *has_errors || adjustment.references_non_lt_error(); + *adjustment = table.resolve_completely(adjustment.as_ref()).store(); + *has_errors = *has_errors || adjustment.as_ref().references_non_lt_error(); } pat_adjustments.shrink_to_fit(); result.tuple_field_access_types = tuple_field_accesses_rev .into_iter() - .map(|subst| table.resolve_completely(subst)) + .map(|subst| table.resolve_completely(subst).store()) .inspect(|subst| { - *has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error()); + *has_errors = + *has_errors || subst.as_ref().iter().any(|ty| ty.references_non_lt_error()); }) .collect(); result.tuple_field_access_types.shrink_to_fit(); @@ -1262,10 +1241,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn write_expr_ty(&mut self, expr: ExprId, ty: Ty<'db>) { - self.result.type_of_expr.insert(expr, ty); + self.result.type_of_expr.insert(expr, ty.store()); } - pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) { + pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) { if adjustments.is_empty() { return; } @@ -1278,7 +1257,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ) => { // NeverToAny coercion can target any type, so instead of adding a new // adjustment on top we can change the target. 
- *target = *new_target; + *target = new_target.clone(); } _ => { *entry.get_mut() = adjustments; @@ -1291,7 +1270,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } } - fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty<'db>]>) { + fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[StoredTy]>) { if adjustments.is_empty() { return; } @@ -1304,7 +1283,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { func: FunctionId, subst: GenericArgs<'db>, ) { - self.result.method_resolutions.insert(expr, (func, subst)); + self.result.method_resolutions.insert(expr, (func, subst.store())); } fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { @@ -1317,22 +1296,22 @@ impl<'body, 'db> InferenceContext<'body, 'db> { item: CandidateId, subs: GenericArgs<'db>, ) { - self.result.assoc_resolutions.insert(id, (item, subs)); + self.result.assoc_resolutions.insert(id, (item, subs.store())); } fn write_pat_ty(&mut self, pat: PatId, ty: Ty<'db>) { - self.result.type_of_pat.insert(pat, ty); + self.result.type_of_pat.insert(pat, ty.store()); } fn write_type_placeholder_ty(&mut self, type_ref: TypeRefId, ty: Ty<'db>) { - self.result.type_of_type_placeholder.insert(type_ref, ty); + self.result.type_of_type_placeholder.insert(type_ref, ty.store()); } fn write_binding_ty(&mut self, id: BindingId, ty: Ty<'db>) { - self.result.type_of_binding.insert(id, ty); + self.result.type_of_binding.insert(id, ty.store()); } - pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) { + pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic) { self.diagnostics.push(diagnostic); } @@ -1486,7 +1465,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { match ty.kind() { TyKind::Adt(adt_def, substs) => match adt_def.def_id().0 { AdtId::StructId(struct_id) => { - match self.db.field_types(struct_id.into()).values().next_back().copied() { + match self + .db + .field_types(struct_id.into()) + .values() + .next_back() + .map(|it| it.get()) + { Some(field) => { ty = field.instantiate(self.interner(), substs); } @@ -1547,7 +1532,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { self.result .type_mismatches .get_or_insert_default() - .insert(id, TypeMismatch { expected, actual }); + .insert(id, TypeMismatch { expected: expected.store(), actual: actual.store() }); } result } @@ -1592,7 +1577,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn expr_ty(&self, expr: ExprId) -> Ty<'db> { - self.result[expr] + self.result.expr_ty(expr) } fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { @@ -1600,7 +1585,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { if let Some(it) = self.result.expr_adjustments.get(&e) && let Some(it) = it.last() { - ty = Some(it.target); + ty = Some(it.target.as_ref()); } ty.unwrap_or_else(|| self.expr_ty(e)) } diff --git a/crates/hir-ty/src/infer/autoderef.rs b/crates/hir-ty/src/infer/autoderef.rs index b54a6cdee2d4..d748c89e6775 100644 --- a/crates/hir-ty/src/infer/autoderef.rs +++ b/crates/hir-ty/src/infer/autoderef.rs @@ -25,7 +25,7 @@ impl<'db> InferenceTable<'db> { } impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> { - pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec>> { + pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec> { let steps = self.steps(); if steps.is_empty() { return InferOk { obligations: PredicateObligations::new(), value: vec![] }; @@ -42,7 +42,10 @@ impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> { } }) .zip(targets) - .map(|(autoderef, 
target)| Adjustment { kind: Adjust::Deref(autoderef), target }) + .map(|(autoderef, target)| Adjustment { + kind: Adjust::Deref(autoderef), + target: target.store(), + }) .collect(); InferOk { obligations: self.take_obligations(), value: steps } diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs index 00a1dfff6d95..d073b06ccc8a 100644 --- a/crates/hir-ty/src/infer/cast.rs +++ b/crates/hir-ty/src/infer/cast.rs @@ -4,7 +4,7 @@ use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags}; use rustc_ast_ir::Mutability; use rustc_type_ir::{ Flags, InferTy, TypeFlags, UintTy, - inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _}, + inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _}, }; use stdx::never; @@ -83,8 +83,13 @@ impl CastError { expr: ExprId, expr_ty: Ty<'db>, cast_ty: Ty<'db>, - ) -> InferenceDiagnostic<'db> { - InferenceDiagnostic::InvalidCast { expr, error: self, expr_ty, cast_ty } + ) -> InferenceDiagnostic { + InferenceDiagnostic::InvalidCast { + expr, + error: self, + expr_ty: expr_ty.store(), + cast_ty: cast_ty.store(), + } } } @@ -109,7 +114,7 @@ impl<'db> CastCheck<'db> { pub(super) fn check( &mut self, ctx: &mut InferenceContext<'_, 'db>, - ) -> Result<(), InferenceDiagnostic<'db>> { + ) -> Result<(), InferenceDiagnostic> { self.expr_ty = ctx.table.try_structurally_resolve_type(self.expr_ty); self.cast_ty = ctx.table.try_structurally_resolve_type(self.cast_ty); @@ -137,7 +142,7 @@ impl<'db> CastCheck<'db> { { return Err(InferenceDiagnostic::CastToUnsized { expr: self.expr, - cast_ty: self.cast_ty, + cast_ty: self.cast_ty.store(), }); } @@ -393,8 +398,9 @@ fn pointer_kind<'db>( let struct_data = id.fields(ctx.db); if let Some((last_field, _)) = struct_data.fields().iter().last() { - let last_field_ty = - ctx.db.field_types(id.into())[last_field].instantiate(ctx.interner(), subst); + let last_field_ty = ctx.db.field_types(id.into())[last_field] + .get() + .instantiate(ctx.interner(), subst); pointer_kind(last_field_ty, ctx) } else { Ok(Some(PointerKind::Thin)) diff --git a/crates/hir-ty/src/infer/closure/analysis.rs b/crates/hir-ty/src/infer/closure/analysis.rs index 308c01865ae3..a7369d606da4 100644 --- a/crates/hir-ty/src/infer/closure/analysis.rs +++ b/crates/hir-ty/src/infer/closure/analysis.rs @@ -15,7 +15,7 @@ use hir_def::{ }; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; -use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; use smallvec::{SmallVec, smallvec}; use stdx::{format_to, never}; use syntax::utils::is_raw_identifier; @@ -25,21 +25,21 @@ use crate::{ db::{HirDatabase, InternedClosure, InternedClosureId}, infer::InferenceContext, mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem}, - next_solver::{DbInterner, EarlyBinder, GenericArgs, Ty, TyKind}, + next_solver::{DbInterner, GenericArgs, StoredEarlyBinder, StoredTy, Ty, TyKind}, traits::FnTrait, }; // The below functions handle capture and closure kind (Fn, FnMut, ..) 
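The hunks above and below all follow the same conversion this patch introduces: query results keep owned stored forms of solver values, written with `.store()` and read back with `.as_ref()` (or `.get()` for stored `EarlyBinder` fields). A minimal, self-contained sketch of that round-trip, using toy `Ty`/`StoredTy` stand-ins rather than the real hir-ty definitions:

#[derive(Debug, Clone, PartialEq, Eq)]
enum TyData {
    Bool,
    Ref(Box<TyData>),
}

/// Lifetime-carrying handle, in the spirit of `Ty<'db>`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Ty<'db>(&'db TyData);

/// Owned, lifetime-erased form, in the spirit of `StoredTy`.
#[derive(Debug, Clone, PartialEq, Eq)]
struct StoredTy(TyData);

impl<'db> Ty<'db> {
    /// Detach from the borrowed data so the value can live in a result struct.
    fn store(self) -> StoredTy {
        StoredTy(self.0.clone())
    }
}

impl StoredTy {
    /// Borrow the stored data back as a lifetime-carrying handle.
    fn as_ref(&self) -> Ty<'_> {
        Ty(&self.0)
    }
}

fn main() {
    let interned = TyData::Ref(Box::new(TyData::Bool));
    let ty = Ty(&interned);
    let stored = ty.store(); // what `write_expr_ty(expr, ty.store())` does
    assert_eq!(stored.as_ref(), ty); // what `expr_ty` / `pat_ty` read back
}

The point of the owned form is that it carries no `'db` lifetime, so structs such as `InferenceResult`, `InferenceDiagnostic`, or `CapturedItem` can drop their lifetime parameter instead of relying on Salsa interning.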
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] -pub(crate) struct HirPlace<'db> { +pub(crate) struct HirPlace { pub(crate) local: BindingId, - pub(crate) projections: Vec>, + pub(crate) projections: Vec>, } -impl<'db> HirPlace<'db> { - fn ty(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> { - let mut ty = ctx.table.resolve_completely(ctx.result[self.local]); +impl HirPlace { + fn ty<'db>(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> { + let mut ty = ctx.table.resolve_completely(ctx.result.binding_ty(self.local)); for p in &self.projections { ty = p.projected_ty( &ctx.table.infer_ctxt, @@ -78,8 +78,8 @@ pub enum CaptureKind { } #[derive(Debug, Clone, PartialEq, Eq, salsa::Update)] -pub struct CapturedItem<'db> { - pub(crate) place: HirPlace<'db>, +pub struct CapturedItem { + pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, /// The inner vec is the stacks; the outer vec is for each capture reference. /// @@ -88,11 +88,10 @@ pub struct CapturedItem<'db> { /// copy all captures of the inner closure to the outer closure, and then we may /// truncate them, and we want the correct span to be reported. span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, - #[update(unsafe(with(crate::utils::unsafe_update_eq)))] - pub(crate) ty: EarlyBinder<'db, Ty<'db>>, + pub(crate) ty: StoredEarlyBinder, } -impl<'db> CapturedItem<'db> { +impl CapturedItem { pub fn local(&self) -> BindingId { self.place.local } @@ -102,9 +101,9 @@ impl<'db> CapturedItem<'db> { self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref)) } - pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> { + pub fn ty<'db>(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> { let interner = DbInterner::new_no_crate(db); - self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args) + self.ty.get().instantiate(interner, subst.split_closure_args_untupled().parent_args) } pub fn kind(&self) -> CaptureKind { @@ -273,15 +272,15 @@ impl<'db> CapturedItem<'db> { } #[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct CapturedItemWithoutTy<'db> { - pub(crate) place: HirPlace<'db>, +pub(crate) struct CapturedItemWithoutTy { + pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, /// The inner vec is the stacks; the outer vec is for each capture reference. pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, } -impl<'db> CapturedItemWithoutTy<'db> { - fn with_ty(self, ctx: &mut InferenceContext<'_, 'db>) -> CapturedItem<'db> { +impl CapturedItemWithoutTy { + fn with_ty(self, ctx: &mut InferenceContext<'_, '_>) -> CapturedItem { let ty = self.place.ty(ctx); let ty = match &self.kind { CaptureKind::ByValue => ty, @@ -297,13 +296,13 @@ impl<'db> CapturedItemWithoutTy<'db> { place: self.place, kind: self.kind, span_stacks: self.span_stacks, - ty: EarlyBinder::bind(ty), + ty: StoredEarlyBinder::bind(ty.store()), } } } impl<'db> InferenceContext<'_, 'db> { - fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option> { + fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option { let r = self.place_of_expr_without_adjust(tgt_expr)?; let adjustments = self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default(); @@ -311,7 +310,7 @@ impl<'db> InferenceContext<'_, 'db> { } /// Pushes the span into `current_capture_span_stack`, *without clearing it first*. 
- fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option> { + fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option { if path.type_anchor().is_some() { return None; } @@ -332,7 +331,7 @@ impl<'db> InferenceContext<'_, 'db> { } /// Changes `current_capture_span_stack` to contain the stack of spans for this expr. - fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option> { + fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option { self.current_capture_span_stack.clear(); match &self.body[tgt_expr] { Expr::Path(p) => { @@ -367,7 +366,7 @@ impl<'db> InferenceContext<'_, 'db> { None } - fn push_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) { + fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) { self.current_captures.push(CapturedItemWithoutTy { place, kind, @@ -375,11 +374,7 @@ impl<'db> InferenceContext<'_, 'db> { }); } - fn truncate_capture_spans( - &self, - capture: &mut CapturedItemWithoutTy<'db>, - mut truncate_to: usize, - ) { + fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) { // The first span is the identifier, and it must always remain. truncate_to += 1; for span_stack in &mut capture.span_stacks { @@ -404,14 +399,14 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn ref_expr(&mut self, expr: ExprId, place: Option>) { + fn ref_expr(&mut self, expr: ExprId, place: Option) { if let Some(place) = place { self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared)); } self.walk_expr(expr); } - fn add_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) { + fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) { if self.is_upvar(&place) { self.push_capture(place, kind); } @@ -427,7 +422,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn mutate_expr(&mut self, expr: ExprId, place: Option>) { + fn mutate_expr(&mut self, expr: ExprId, place: Option) { if let Some(place) = place { self.add_capture( place, @@ -444,7 +439,7 @@ impl<'db> InferenceContext<'_, 'db> { self.walk_expr(expr); } - fn consume_place(&mut self, place: HirPlace<'db>) { + fn consume_place(&mut self, place: HirPlace) { if self.is_upvar(&place) { let ty = place.ty(self); let kind = if self.is_ty_copy(ty) { @@ -456,7 +451,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment<'db>]) { + fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) { if let Some((last, rest)) = adjustment.split_last() { match &last.kind { Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => { @@ -477,12 +472,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn ref_capture_with_adjusts( - &mut self, - m: Mutability, - tgt_expr: ExprId, - rest: &[Adjustment<'db>], - ) { + fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) { let capture_kind = match m { Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), @@ -780,7 +770,7 @@ impl<'db> InferenceContext<'_, 'db> { } Pat::Bind { id, .. 
} => match self.result.binding_modes[p] { crate::BindingMode::Move => { - if self.is_ty_copy(self.result.type_of_binding[*id]) { + if self.is_ty_copy(self.result.binding_ty(*id)) { update_result(CaptureKind::ByRef(BorrowKind::Shared)); } else { update_result(CaptureKind::ByValue); @@ -798,7 +788,7 @@ impl<'db> InferenceContext<'_, 'db> { self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut)); } - fn is_upvar(&self, place: &HirPlace<'db>) -> bool { + fn is_upvar(&self, place: &HirPlace) -> bool { if let Some(c) = self.current_closure { let InternedClosure(_, root) = self.db.lookup_intern_closure(c); return self.body.is_binding_upvar(place.local, root); @@ -830,7 +820,7 @@ impl<'db> InferenceContext<'_, 'db> { // FIXME: Borrow checker problems without this. let mut current_captures = std::mem::take(&mut self.current_captures); for capture in &mut current_captures { - let mut ty = self.table.resolve_completely(self.result[capture.place.local]); + let mut ty = self.table.resolve_completely(self.result.binding_ty(capture.place.local)); if ty.is_raw_ptr() || ty.is_union() { capture.kind = CaptureKind::ByRef(BorrowKind::Shared); self.truncate_capture_spans(capture, 0); @@ -875,7 +865,7 @@ impl<'db> InferenceContext<'_, 'db> { fn minimize_captures(&mut self) { self.current_captures.sort_unstable_by_key(|it| it.place.projections.len()); - let mut hash_map = FxHashMap::, usize>::default(); + let mut hash_map = FxHashMap::::default(); let result = mem::take(&mut self.current_captures); for mut item in result { let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; @@ -910,7 +900,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn consume_with_pat(&mut self, mut place: HirPlace<'db>, tgt_pat: PatId) { + fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) { let adjustments_count = self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default(); place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref)); @@ -921,7 +911,7 @@ impl<'db> InferenceContext<'_, 'db> { Pat::Missing | Pat::Wild => (), Pat::Tuple { args, ellipsis } => { let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); - let field_count = match self.result[tgt_pat].kind() { + let field_count = match self.result.pat_ty(tgt_pat).kind() { TyKind::Tuple(s) => s.len(), _ => break 'reset_span_stack, }; @@ -1221,11 +1211,11 @@ impl<'db> InferenceContext<'_, 'db> { } /// Call this only when the last span in the stack isn't a split. -fn apply_adjusts_to_place<'db>( +fn apply_adjusts_to_place( current_capture_span_stack: &mut Vec, - mut r: HirPlace<'db>, - adjustments: &[Adjustment<'db>], -) -> Option> { + mut r: HirPlace, + adjustments: &[Adjustment], +) -> Option { let span = *current_capture_span_stack.last().expect("empty capture span stack"); for adj in adjustments { match &adj.kind { diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index 77c7155550da..1233543c40f5 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs @@ -104,7 +104,7 @@ struct Coerce { cause: ObligationCause, } -type CoerceResult<'db> = InferResult<'db, (Vec>, Ty<'db>)>; +type CoerceResult<'db> = InferResult<'db, (Vec, Ty<'db>)>; /// Coercing a mutable reference to an immutable works, while /// coercing `&T` to `&mut T` should be forbidden. @@ -114,7 +114,7 @@ fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateRes /// This always returns `Ok(...)`. 
fn success<'db>( - adj: Vec>, + adj: Vec, target: Ty<'db>, obligations: PredicateObligations<'db>, ) -> CoerceResult<'db> { @@ -206,14 +206,17 @@ where &mut self, a: Ty<'db>, b: Ty<'db>, - adjustments: impl IntoIterator>, + adjustments: impl IntoIterator, final_adjustment: Adjust, ) -> CoerceResult<'db> { self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| { success( adjustments .into_iter() - .chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment })) + .chain(std::iter::once(Adjustment { + target: ty.store(), + kind: final_adjustment, + })) .collect(), ty, obligations, @@ -237,7 +240,7 @@ where if self.coerce_never { return success( - vec![Adjustment { kind: Adjust::NeverToAny, target: b }], + vec![Adjustment { kind: Adjust::NeverToAny, target: b.store() }], b, PredicateObligations::new(), ); @@ -532,7 +535,8 @@ where // Now apply the autoref. let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase); - adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty }); + adjustments + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty.store() }); debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments); @@ -635,10 +639,10 @@ where let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No); Some(( - Adjustment { kind: Adjust::Deref(None), target: ty_a }, + Adjustment { kind: Adjust::Deref(None), target: ty_a.store() }, Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), - target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b), + target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b).store(), }, )) } @@ -646,16 +650,16 @@ where coerce_mutbls(mt_a, mt_b)?; Some(( - Adjustment { kind: Adjust::Deref(None), target: ty_a }, + Adjustment { kind: Adjust::Deref(None), target: ty_a.store() }, Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)), - target: Ty::new_ptr(self.interner(), ty_a, mt_b), + target: Ty::new_ptr(self.interner(), ty_a, mt_b).store(), }, )) } _ => None, }; - let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target); + let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target.as_ref()); // Setup either a subtyping or a LUB relationship between // the `CoerceUnsized` target type and the expected type. 
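In the coercion code, every `Adjustment` now records its `target` type in the stored form, so building a chain means calling `.store()` on each intermediate type and reading it back with `.as_ref()` when a later step needs it (as with the deref-then-borrow reborrow pair above). A rough sketch with simplified stand-in types, not the real `Adjust`/`Adjustment` definitions:

#[derive(Debug, Clone, PartialEq, Eq)]
enum StoredTy {
    I32,
    Ref(Box<StoredTy>),
}

#[derive(Debug, Clone, PartialEq, Eq)]
enum Adjust {
    Deref,
    Borrow,
}

#[derive(Debug, Clone, PartialEq, Eq)]
struct Adjustment {
    kind: Adjust,
    /// Type of the expression *after* this step; owned so the whole chain can
    /// be kept in the inference result.
    target: StoredTy,
}

/// Build the deref-then-borrow ("reborrow") pair, recording each intermediate type.
fn reborrow(pointee: StoredTy) -> Vec<Adjustment> {
    vec![
        Adjustment { kind: Adjust::Deref, target: pointee.clone() },
        Adjustment { kind: Adjust::Borrow, target: StoredTy::Ref(Box::new(pointee)) },
    ]
}

fn main() {
    let steps = reborrow(StoredTy::I32);
    assert!(matches!(steps[1].kind, Adjust::Borrow));
    // The type fed into the rest of the coercion is the last recorded target,
    // mirroring `reborrow.as_ref().map_or(source, |(_, r)| r.target.as_ref())`.
    let coerce_source = steps.last().map(|a| &a.target);
    assert_eq!(coerce_source, Some(&StoredTy::Ref(Box::new(StoredTy::I32))));
}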
@@ -726,7 +730,7 @@ where Ok(None) => { if trait_pred.def_id().0 == unsize_did { let self_ty = trait_pred.self_ty(); - let unsize_ty = trait_pred.trait_ref.args.inner()[1].expect_ty(); + let unsize_ty = trait_pred.trait_ref.args[1].expect_ty(); debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred); match (self_ty.kind(), unsize_ty.kind()) { (TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..)) @@ -815,7 +819,7 @@ where b, adjustment.map(|kind| Adjustment { kind, - target: Ty::new_fn_ptr(this.interner(), fn_ty_a), + target: Ty::new_fn_ptr(this.interner(), fn_ty_a).store(), }), Adjust::Pointer(PointerCast::UnsafeFnPointer), ) @@ -955,7 +959,7 @@ where self.unify_and( a_raw, b, - [Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }], + [Adjustment { kind: Adjust::Deref(None), target: mt_a.ty.store() }], Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), ) } else if mt_a.mutbl != mutbl_b { @@ -1170,12 +1174,15 @@ impl<'db> InferenceContext<'_, 'db> { for &expr in exprs { self.write_expr_adj( expr, - Box::new([Adjustment { kind: prev_adjustment.clone(), target: fn_ptr }]), + Box::new([Adjustment { + kind: prev_adjustment.clone(), + target: fn_ptr.store(), + }]), ); } self.write_expr_adj( new, - Box::new([Adjustment { kind: next_adjustment, target: fn_ptr }]), + Box::new([Adjustment { kind: next_adjustment, target: fn_ptr.store() }]), ); return Ok(fn_ptr); } @@ -1510,9 +1517,9 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> { icx.result.type_mismatches.get_or_insert_default().insert( expression.into(), if label_expression_as_expected { - TypeMismatch { expected: found, actual: expected } + TypeMismatch { expected: found.store(), actual: expected.store() } } else { - TypeMismatch { expected, actual: found } + TypeMismatch { expected: expected.store(), actual: found.store() } }, ); } @@ -1570,7 +1577,7 @@ fn coerce<'db>( db: &'db dyn HirDatabase, env: ParamEnvAndCrate<'db>, tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>, -) -> Result<(Vec>, Ty<'db>), TypeError>> { +) -> Result<(Vec, Ty<'db>), TypeError>> { let interner = DbInterner::new_with(db, env.krate); let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys); @@ -1593,7 +1600,6 @@ fn coerce<'db>( let mut ocx = ObligationCtxt::new(&infcx); let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok); _ = ocx.try_evaluate_obligations(); - let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty)); // default any type vars that weren't unified back to their original bound vars // (kind of hacky) @@ -1701,10 +1707,18 @@ fn coerce<'db>( } // FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`. 
- let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver { - interner, - debruijn: DebruijnIndex::ZERO, - var_values: vars.var_values, - }); + let mut resolver = + Resolver { interner, debruijn: DebruijnIndex::ZERO, var_values: vars.var_values }; + let ty = infcx.resolve_vars_if_possible(ty).fold_with(&mut resolver); + let adjustments = adjustments + .into_iter() + .map(|adjustment| Adjustment { + kind: adjustment.kind, + target: infcx + .resolve_vars_if_possible(adjustment.target.as_ref()) + .fold_with(&mut resolver) + .store(), + }) + .collect(); Ok((adjustments, ty)) } diff --git a/crates/hir-ty/src/infer/diagnostics.rs b/crates/hir-ty/src/infer/diagnostics.rs index 0eb7a2f4740f..2bdc6f9491dc 100644 --- a/crates/hir-ty/src/infer/diagnostics.rs +++ b/crates/hir-ty/src/infer/diagnostics.rs @@ -25,10 +25,10 @@ use crate::{ // to our resolver and so we cannot have mutable reference, but we really want to have // ability to dispatch diagnostics during this work otherwise the code becomes a complete mess. #[derive(Debug, Default, Clone)] -pub(super) struct Diagnostics<'db>(RefCell>>); +pub(super) struct Diagnostics(RefCell>); -impl<'db> Diagnostics<'db> { - pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) { +impl Diagnostics { + pub(super) fn push(&self, diagnostic: InferenceDiagnostic) { self.0.borrow_mut().push(diagnostic); } @@ -42,19 +42,19 @@ impl<'db> Diagnostics<'db> { ); } - pub(super) fn finish(self) -> ThinVec> { + pub(super) fn finish(self) -> ThinVec { self.0.into_inner() } } -pub(crate) struct PathDiagnosticCallbackData<'a, 'db> { +pub(crate) struct PathDiagnosticCallbackData<'a> { node: ExprOrPatId, - diagnostics: &'a Diagnostics<'db>, + diagnostics: &'a Diagnostics, } pub(super) struct InferenceTyLoweringContext<'db, 'a> { ctx: TyLoweringContext<'db, 'a>, - diagnostics: &'a Diagnostics<'db>, + diagnostics: &'a Diagnostics, source: InferenceTyDiagnosticSource, } @@ -64,7 +64,7 @@ impl<'db, 'a> InferenceTyLoweringContext<'db, 'a> { db: &'db dyn HirDatabase, resolver: &'a Resolver<'db>, store: &'a ExpressionStore, - diagnostics: &'a Diagnostics<'db>, + diagnostics: &'a Diagnostics, source: InferenceTyDiagnosticSource, generic_def: GenericDefId, lifetime_elision: LifetimeElisionKind<'db>, diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 4e1711e48ec4..5eeaeef9d9f6 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -17,7 +17,7 @@ use hir_expand::name::Name; use rustc_ast_ir::Mutability; use rustc_type_ir::{ CoroutineArgs, CoroutineArgsParts, InferTy, Interner, - inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _}, + inherent::{AdtDef, GenericArgs as _, IntoKind, Ty as _}, }; use syntax::ast::RangeOp; use tracing::debug; @@ -35,7 +35,7 @@ use crate::{ lower::{GenericPredicates, lower_mutability}, method_resolution::{self, CandidateId, MethodCallee, MethodError}, next_solver::{ - ErrorGuaranteed, FnSig, GenericArgs, TraitRef, Ty, TyKind, TypeError, + ErrorGuaranteed, FnSig, GenericArg, GenericArgs, TraitRef, Ty, TyKind, TypeError, infer::{ BoundRegionConversionTime, InferOk, traits::{Obligation, ObligationCause}, @@ -68,10 +68,10 @@ impl<'db> InferenceContext<'_, 'db> { if let Some(expected_ty) = expected.only_has_type(&mut self.table) { let could_unify = self.unify(ty, expected_ty); if !could_unify { - self.result - .type_mismatches - .get_or_insert_default() - .insert(tgt_expr.into(), TypeMismatch { expected: expected_ty, actual: ty }); + 
self.result.type_mismatches.get_or_insert_default().insert( + tgt_expr.into(), + TypeMismatch { expected: expected_ty.store(), actual: ty.store() }, + ); } } ty @@ -98,10 +98,10 @@ impl<'db> InferenceContext<'_, 'db> { match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, is_read) { Ok(res) => res, Err(_) => { - self.result - .type_mismatches - .get_or_insert_default() - .insert(expr.into(), TypeMismatch { expected: target, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + expr.into(), + TypeMismatch { expected: target.store(), actual: ty.store() }, + ); target } } @@ -276,7 +276,7 @@ impl<'db> InferenceContext<'_, 'db> { if ty.is_never() { if let Some(adjustments) = self.result.expr_adjustments.get(&expr) { return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments { - *target + target.as_ref() } else { self.err_ty() }; @@ -292,10 +292,10 @@ impl<'db> InferenceContext<'_, 'db> { if let Some(expected_ty) = expected.only_has_type(&mut self.table) { let could_unify = self.unify(ty, expected_ty); if !could_unify { - self.result - .type_mismatches - .get_or_insert_default() - .insert(expr.into(), TypeMismatch { expected: expected_ty, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + expr.into(), + TypeMismatch { expected: expected_ty.store(), actual: ty.store() }, + ); } } ty @@ -637,7 +637,7 @@ impl<'db> InferenceContext<'_, 'db> { } }; let field_ty = field_def.map_or(self.err_ty(), |it| { - field_types[it].instantiate(self.interner(), &substs) + field_types[it].get().instantiate(self.interner(), &substs) }); // Field type might have some unknown types @@ -780,7 +780,7 @@ impl<'db> InferenceContext<'_, 'db> { Ty::new_adt( self.interner(), adt, - GenericArgs::new_from_iter(self.interner(), [ty.into()]), + GenericArgs::new_from_iter(self.interner(), [GenericArg::from(ty)]), ) }; match (range_type, lhs_ty, rhs_ty) { @@ -947,7 +947,10 @@ impl<'db> InferenceContext<'_, 'db> { // Underscore expression is an error, we render a specialized diagnostic // to let the user know what type is expected though. 
let expected = expected.to_option(&mut self.table).unwrap_or_else(|| self.err_ty()); - self.push_diagnostic(InferenceDiagnostic::TypedHole { expr: tgt_expr, expected }); + self.push_diagnostic(InferenceDiagnostic::TypedHole { + expr: tgt_expr, + expected: expected.store(), + }); expected } Expr::OffsetOf(_) => self.types.usize, @@ -1183,10 +1186,10 @@ impl<'db> InferenceContext<'_, 'db> { match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) { Ok(res) => res, Err(_) => { - this.result - .type_mismatches - .get_or_insert_default() - .insert(tgt_expr.into(), TypeMismatch { expected: target, actual: ty }); + this.result.type_mismatches.get_or_insert_default().insert( + tgt_expr.into(), + TypeMismatch { expected: target.store(), actual: ty.store() }, + ); target } } @@ -1234,7 +1237,7 @@ impl<'db> InferenceContext<'_, 'db> { &mut self, fn_x: FnTrait, derefed_callee: Ty<'db>, - adjustments: &mut Vec>, + adjustments: &mut Vec, callee_ty: Ty<'db>, params: &[Ty<'db>], tgt_expr: ExprId, @@ -1249,7 +1252,8 @@ impl<'db> InferenceContext<'_, 'db> { .unwrap_or(true) { // prefer reborrow to move - adjustments.push(Adjustment { kind: Adjust::Deref(None), target: inner }); + adjustments + .push(Adjustment { kind: Adjust::Deref(None), target: inner.store() }); adjustments.push(Adjustment::borrow( self.interner(), Mutability::Mut, @@ -1282,13 +1286,10 @@ impl<'db> InferenceContext<'_, 'db> { }; let trait_data = trait_.trait_items(self.db); if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) { - let subst = GenericArgs::new_from_iter( - self.interner(), - [ - callee_ty.into(), - Ty::new_tup_from_iter(self.interner(), params.iter().copied()).into(), - ], - ); + let subst = GenericArgs::new_from_slice(&[ + callee_ty.into(), + Ty::new_tup(self.interner(), params).into(), + ]); self.write_method_resolution(tgt_expr, func, subst); } } @@ -1549,7 +1550,10 @@ impl<'db> InferenceContext<'_, 'db> { { this.result.type_mismatches.get_or_insert_default().insert( expr.into(), - TypeMismatch { expected: t, actual: this.types.unit }, + TypeMismatch { + expected: t.store(), + actual: this.types.unit.store(), + }, ); } t @@ -1567,7 +1571,7 @@ impl<'db> InferenceContext<'_, 'db> { &mut self, receiver_ty: Ty<'db>, name: &Name, - ) -> Option<(Ty<'db>, Either, Vec>, bool)> { + ) -> Option<(Ty<'db>, Either, Vec, bool)> { let interner = self.interner(); let mut autoderef = self.table.autoderef_with_tracking(receiver_ty); let mut private_field = None; @@ -1612,6 +1616,7 @@ impl<'db> InferenceContext<'_, 'db> { return None; } let ty = self.db.field_types(field_id.parent)[field_id.local_id] + .get() .instantiate(interner, parameters); Some((Either::Left(field_id), ty)) }); @@ -1629,6 +1634,7 @@ impl<'db> InferenceContext<'_, 'db> { let adjustments = self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok()); let ty = self.db.field_types(field_id.parent)[field_id.local_id] + .get() .instantiate(self.interner(), subst); let ty = self.process_remote_user_written_ty(ty); @@ -1679,7 +1685,7 @@ impl<'db> InferenceContext<'_, 'db> { ); self.push_diagnostic(InferenceDiagnostic::UnresolvedField { expr: tgt_expr, - receiver: receiver_ty, + receiver: receiver_ty.store(), name: name.clone(), method_with_same_name_exists: resolved.is_ok(), }); @@ -1755,7 +1761,7 @@ impl<'db> InferenceContext<'_, 'db> { None => { self.push_diagnostic(InferenceDiagnostic::ExpectedFunction { call_expr: tgt_expr, - found: callee_ty, + found: callee_ty.store(), }); (Vec::new(), Ty::new_error(interner, 
ErrorGuaranteed)) } @@ -1867,9 +1873,9 @@ impl<'db> InferenceContext<'_, 'db> { self.push_diagnostic(InferenceDiagnostic::UnresolvedMethodCall { expr: tgt_expr, - receiver: receiver_ty, + receiver: receiver_ty.store(), name: method_name.clone(), - field_with_same_name: field_with_same_name_exists, + field_with_same_name: field_with_same_name_exists.map(|it| it.store()), assoc_func_with_same_name: assoc_func_with_same_name.map(|it| it.def_id), }); @@ -2115,10 +2121,10 @@ impl<'db> InferenceContext<'_, 'db> { && args_count_matches { // Don't report type mismatches if there is a mismatch in args count. - self.result - .type_mismatches - .get_or_insert_default() - .insert((*arg).into(), TypeMismatch { expected, actual: found }); + self.result.type_mismatches.get_or_insert_default().insert( + (*arg).into(), + TypeMismatch { expected: expected.store(), actual: found.store() }, + ); } } } diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 87dcaa8a4ea8..729ed214daea 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -26,8 +26,8 @@ impl<'db> InferenceContext<'_, 'db> { Adjust::Deref(Some(d)) => { if mutability == Mutability::Mut { let source_ty = match adjustments.peek() { - Some(prev_adj) => prev_adj.target, - None => self.result.type_of_expr[tgt_expr], + Some(prev_adj) => prev_adj.target.as_ref(), + None => self.result.type_of_expr[tgt_expr].as_ref(), }; if let Some(infer_ok) = Self::try_mutable_overloaded_place_op( &self.table, diff --git a/crates/hir-ty/src/infer/op.rs b/crates/hir-ty/src/infer/op.rs index 8236de167f73..c61b6c9ae53d 100644 --- a/crates/hir-ty/src/infer/op.rs +++ b/crates/hir-ty/src/infer/op.rs @@ -213,7 +213,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); let autoref = Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), - target: method.sig.inputs_and_output.inputs()[0], + target: method.sig.inputs_and_output.inputs()[0].store(), }; self.write_expr_adj(lhs_expr, Box::new([autoref])); } @@ -227,7 +227,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { let autoref = Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), - target: method.sig.inputs_and_output.inputs()[1], + target: method.sig.inputs_and_output.inputs()[1].store(), }; // HACK(eddyb) Bypass checks due to reborrows being in // some cases applied on the RHS, on top of which we need diff --git a/crates/hir-ty/src/infer/opaques.rs b/crates/hir-ty/src/infer/opaques.rs index ce4597f83d55..1c722897f47b 100644 --- a/crates/hir-ty/src/infer/opaques.rs +++ b/crates/hir-ty/src/infer/opaques.rs @@ -112,12 +112,12 @@ impl<'db> InferenceContext<'_, 'db> { _ = self.demand_eqtype_fixme_no_diag(expected, hidden_type.ty); } - self.result.type_of_opaque.insert(def_id, ty.ty); + self.result.type_of_opaque.insert(def_id, ty.ty.store()); continue; } - self.result.type_of_opaque.insert(def_id, self.types.error); + self.result.type_of_opaque.insert(def_id, self.types.error.store()); } } diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index a02e280ac637..5eddd4102ba7 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -9,7 +9,7 @@ use hir_def::{ }; use hir_expand::name::Name; use rustc_ast_ir::Mutability; -use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, Ty as _}; use stdx::TupleExt; use crate::{ 
@@ -82,7 +82,7 @@ impl<'db> InferenceContext<'_, 'db> { { // FIXME(DIAGNOSE): private tuple field } - let f = field_types[local_id]; + let f = field_types[local_id].get(); let expected_ty = match substs { Some(substs) => f.instantiate(self.interner(), substs), None => f.instantiate(self.interner(), &[]), @@ -146,7 +146,7 @@ impl<'db> InferenceContext<'_, 'db> { variant: def, }); } - let f = field_types[local_id]; + let f = field_types[local_id].get(); let expected_ty = match substs { Some(substs) => f.instantiate(self.interner(), substs), None => f.instantiate(self.interner(), &[]), @@ -270,7 +270,7 @@ impl<'db> InferenceContext<'_, 'db> { } else if self.is_non_ref_pat(self.body, pat) { let mut pat_adjustments = Vec::new(); while let TyKind::Ref(_lifetime, inner, mutability) = expected.kind() { - pat_adjustments.push(expected); + pat_adjustments.push(expected.store()); expected = self.table.try_structurally_resolve_type(inner); default_bm = match default_bm { BindingMode::Move => BindingMode::Ref(mutability), @@ -333,7 +333,10 @@ impl<'db> InferenceContext<'_, 'db> { Err(_) => { self.result.type_mismatches.get_or_insert_default().insert( pat.into(), - TypeMismatch { expected, actual: ty_inserted_vars }, + TypeMismatch { + expected: expected.store(), + actual: ty_inserted_vars.store(), + }, ); self.write_pat_ty(pat, ty); // We return `expected` to prevent cascading errors. I guess an alternative is to @@ -413,10 +416,10 @@ impl<'db> InferenceContext<'_, 'db> { ) { Ok(ty) => ty, Err(_) => { - self.result - .type_mismatches - .get_or_insert_default() - .insert(pat.into(), TypeMismatch { expected, actual: lhs_ty }); + self.result.type_mismatches.get_or_insert_default().insert( + pat.into(), + TypeMismatch { expected: expected.store(), actual: lhs_ty.store() }, + ); // `rhs_ty` is returned so no further type mismatches are // reported because of this mismatch. 
expected @@ -432,22 +435,22 @@ impl<'db> InferenceContext<'_, 'db> { let ty = self.insert_type_vars_shallow(ty); // FIXME: This never check is odd, but required with out we do inference right now if !expected.is_never() && !self.unify(ty, expected) { - self.result - .type_mismatches - .get_or_insert_default() - .insert(pat.into(), TypeMismatch { expected, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + pat.into(), + TypeMismatch { expected: expected.store(), actual: ty.store() }, + ); } self.write_pat_ty(pat, ty); self.pat_ty_after_adjustment(pat) } fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty<'db> { - *self - .result + self.result .pat_adjustments .get(&pat) .and_then(|it| it.last()) - .unwrap_or(&self.result.type_of_pat[pat]) + .unwrap_or_else(|| &self.result.type_of_pat[pat]) + .as_ref() } fn infer_ref_pat( diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index 301cbf462ce0..a323952494c0 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -64,7 +64,7 @@ impl<'db> InferenceContext<'_, 'db> { } ValueNs::LocalBinding(pat) => { return match self.result.type_of_binding.get(pat) { - Some(ty) => Some(ValuePathResolution::NonGeneric(*ty)), + Some(ty) => Some(ValuePathResolution::NonGeneric(ty.as_ref())), None => { never!("uninferred pattern?"); None diff --git a/crates/hir-ty/src/infer/place_op.rs b/crates/hir-ty/src/infer/place_op.rs index 3ef5e5870a58..5bcae21aa11b 100644 --- a/crates/hir-ty/src/infer/place_op.rs +++ b/crates/hir-ty/src/infer/place_op.rs @@ -65,7 +65,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { oprnd_expr, Box::new([Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)), - target: method.sig.inputs_and_output.inputs()[0], + target: method.sig.inputs_and_output.inputs()[0].store(), }]), ); } else { @@ -151,7 +151,8 @@ impl<'a, 'db> InferenceContext<'a, 'db> { { adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)), - target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty), + target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty) + .store(), }); } else { panic!("input to index is not a ref?"); @@ -159,7 +160,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { if unsize { adjustments.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), - target: method.sig.inputs_and_output.inputs()[0], + target: method.sig.inputs_and_output.inputs()[0].store(), }); } autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice()); @@ -283,7 +284,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { None => return, }; debug!("convert_place_op_to_mutable: method={:?}", method); - self.result.method_resolutions.insert(expr, (method.def_id, method.args)); + self.result.method_resolutions.insert(expr, (method.def_id, method.args.store())); let TyKind::Ref(region, _, Mutability::Mut) = method.sig.inputs_and_output.inputs()[0].kind() @@ -308,9 +309,9 @@ impl<'a, 'db> InferenceContext<'a, 'db> { allow_two_phase_borrow: AllowTwoPhase::No, }; adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl)); - adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()); + adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()).store(); } - source = adjustment.target; + source = adjustment.target.as_ref(); } // If we have an autoref followed by unsizing at the end, fix the unsize target. 
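The place-op conversion above walks an adjustment chain that was already written, rewrites borrows to mutable ones, and threads `source` forward through `adjustment.target.as_ref()`. A simplified illustration of that threading, again with toy types in place of the real ones:

#[derive(Debug, Clone, PartialEq, Eq)]
enum StoredTy {
    Slice,
    Ref(Box<StoredTy>, /* mutable */ bool),
}

#[derive(Debug, Clone)]
enum Adjust {
    Deref,
    Borrow { mutable: bool },
}

#[derive(Debug, Clone)]
struct Adjustment {
    kind: Adjust,
    target: StoredTy,
}

/// Rewrite shared borrows in an existing chain to mutable ones, updating each
/// rewritten step's stored target and threading the source type forward.
fn make_mutable(mut source: StoredTy, adjustments: &mut [Adjustment]) -> StoredTy {
    for adjustment in adjustments.iter_mut() {
        if let Adjust::Borrow { mutable } = &mut adjustment.kind {
            *mutable = true;
            adjustment.target = StoredTy::Ref(Box::new(source.clone()), true);
        }
        // Whatever this step produced is the input of the next one.
        source = adjustment.target.clone();
    }
    source
}

fn main() {
    let mut adjustments = vec![
        Adjustment { kind: Adjust::Deref, target: StoredTy::Slice },
        Adjustment {
            kind: Adjust::Borrow { mutable: false },
            target: StoredTy::Ref(Box::new(StoredTy::Slice), false),
        },
    ];
    let final_ty = make_mutable(StoredTy::Ref(Box::new(StoredTy::Slice), false), &mut adjustments);
    assert_eq!(final_ty, StoredTy::Ref(Box::new(StoredTy::Slice), true));
}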
@@ -320,7 +321,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }, ] = adjustments[..] { - *target = method.sig.inputs_and_output.inputs()[0]; + *target = method.sig.inputs_and_output.inputs()[0].store(); } } } diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index a5060416a16e..ac84797f37a9 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -9,7 +9,7 @@ use intern::sym; use rustc_hash::FxHashSet; use rustc_type_ir::{ TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom, - inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _}, + inherent::{Const as _, GenericArg as _, IntoKind, Ty as _}, solve::Certainty, }; use smallvec::SmallVec; @@ -640,6 +640,7 @@ impl<'db> InferenceTable<'db> { let struct_data = id.fields(self.db); if let Some((last_field, _)) = struct_data.fields().iter().next_back() { let last_field_ty = self.db.field_types(id.into())[last_field] + .get() .instantiate(self.interner(), subst); if structs.contains(&ty) { // A struct recursively contains itself as a tail field somewhere. diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs index 075a7066db78..402e9ce96971 100644 --- a/crates/hir-ty/src/inhabitedness.rs +++ b/crates/hir-ty/src/inhabitedness.rs @@ -154,7 +154,7 @@ impl<'a, 'db> UninhabitedFrom<'a, 'db> { let field_vis = if is_enum { None } else { Some(self.db().field_visibilities(variant)) }; for (fid, _) in fields.iter() { - self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?; + self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid].get(), subst)?; } CONTINUE_OPAQUELY_INHABITED } diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 1ed604865b00..b6ad3624ae28 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -14,10 +14,7 @@ use rustc_abi::{ TargetDataLayout, WrappingRange, }; use rustc_index::IndexVec; -use rustc_type_ir::{ - FloatTy, IntTy, UintTy, - inherent::{IntoKind, SliceLike}, -}; +use rustc_type_ir::{FloatTy, IntTy, UintTy, inherent::IntoKind}; use triomphe::Arc; use crate::{ @@ -25,9 +22,10 @@ use crate::{ consteval::try_const_usize, db::HirDatabase, next_solver::{ - DbInterner, GenericArgs, Ty, TyKind, TypingMode, + DbInterner, GenericArgs, StoredTy, Ty, TyKind, TypingMode, infer::{DbInternerInferExt, traits::ObligationCause}, }, + traits::StoredParamEnvAndCrate, }; pub(crate) use self::adt::layout_of_adt_cycle_result; @@ -144,22 +142,22 @@ fn layout_of_simd_ty<'db>( let Some(TyKind::Array(e_ty, e_len)) = fields .next() .filter(|_| fields.next().is_none()) - .map(|f| (*f.1).instantiate(DbInterner::new_no_crate(db), args).kind()) + .map(|f| (*f.1).get().instantiate(DbInterner::new_no_crate(db), args).kind()) else { return Err(LayoutError::InvalidSimdType); }; let e_len = try_const_usize(db, e_len).ok_or(LayoutError::HasErrorConst)? 
as u64; - let e_ly = db.layout_of_ty(e_ty, env)?; + let e_ly = db.layout_of_ty(e_ty.store(), env.store())?; let cx = LayoutCx::new(dl); Ok(Arc::new(cx.calc.simd_type(e_ly, e_len, repr_packed)?)) } -pub fn layout_of_ty_query<'db>( - db: &'db dyn HirDatabase, - ty: Ty<'db>, - trait_env: ParamEnvAndCrate<'db>, +pub fn layout_of_ty_query( + db: &dyn HirDatabase, + ty: StoredTy, + trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError> { let krate = trait_env.krate; let interner = DbInterner::new_with(db, krate); @@ -170,19 +168,29 @@ pub fn layout_of_ty_query<'db>( let cx = LayoutCx::new(dl); let infer_ctxt = interner.infer_ctxt().build(TypingMode::PostAnalysis); let cause = ObligationCause::dummy(); - let ty = infer_ctxt.at(&cause, trait_env.param_env).deeply_normalize(ty).unwrap_or(ty); + let ty = infer_ctxt + .at(&cause, trait_env.param_env()) + .deeply_normalize(ty.as_ref()) + .unwrap_or(ty.as_ref()); let result = match ty.kind() { TyKind::Adt(def, args) => { match def.inner().id { hir_def::AdtId::StructId(s) => { let repr = AttrFlags::repr(db, s.into()).unwrap_or_default(); if repr.simd() { - return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target); + return layout_of_simd_ty( + db, + s, + repr.packed(), + &args, + trait_env.as_ref(), + &target, + ); } } _ => {} } - return db.layout_of_adt(def.inner().id, args, trait_env); + return db.layout_of_adt(def.inner().id, args.store(), trait_env); } TyKind::Bool => Layout::scalar( dl, @@ -246,21 +254,23 @@ pub fn layout_of_ty_query<'db>( ), TyKind::Tuple(tys) => { let kind = - if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; + if tys.is_empty() { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; - let fields = - tys.iter().map(|k| db.layout_of_ty(k, trait_env)).collect::, _>>()?; + let fields = tys + .iter() + .map(|k| db.layout_of_ty(k.store(), trait_env.clone())) + .collect::, _>>()?; let fields = fields.iter().map(|it| &**it).collect::>(); let fields = fields.iter().collect::>(); cx.calc.univariant(&fields, &ReprOptions::default(), kind)? } TyKind::Array(element, count) => { let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64; - let element = db.layout_of_ty(element, trait_env)?; + let element = db.layout_of_ty(element.store(), trait_env)?; cx.calc.array_like::<_, _, ()>(&element, Some(count))? } TyKind::Slice(element) => { - let element = db.layout_of_ty(element, trait_env)?; + let element = db.layout_of_ty(element.store(), trait_env)?; cx.calc.array_like::<_, _, ()>(&element, None)? 
} TyKind::Str => { @@ -325,9 +335,11 @@ pub fn layout_of_ty_query<'db>( let fields = captures .iter() .map(|it| { - let ty = - it.ty.instantiate(interner, args.split_closure_args_untupled().parent_args); - db.layout_of_ty(ty, trait_env) + let ty = it + .ty + .get() + .instantiate(interner, args.split_closure_args_untupled().parent_args); + db.layout_of_ty(ty.store(), trait_env.clone()) }) .collect::, _>>()?; let fields = fields.iter().map(|it| &**it).collect::>(); @@ -357,11 +369,11 @@ pub fn layout_of_ty_query<'db>( Ok(Arc::new(result)) } -pub(crate) fn layout_of_ty_cycle_result<'db>( +pub(crate) fn layout_of_ty_cycle_result( _: &dyn HirDatabase, _: salsa::Id, - _: Ty<'db>, - _: ParamEnvAndCrate<'db>, + _: StoredTy, + _: StoredParamEnvAndCrate, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } @@ -377,7 +389,7 @@ fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) - let mut it = data.fields().iter().rev(); match it.next() { Some((f, _)) => { - let last_field_ty = field_ty(db, struct_id.into(), f, &args); + let last_field_ty = field_ty(db, struct_id.into(), f, args); struct_tail_erasing_lifetimes(db, last_field_ty) } None => pointee, @@ -398,9 +410,9 @@ fn field_ty<'a>( db: &'a dyn HirDatabase, def: hir_def::VariantId, fd: LocalFieldId, - args: &GenericArgs<'a>, + args: GenericArgs<'a>, ) -> Ty<'a> { - db.field_types(def)[fd].instantiate(DbInterner::new_no_crate(db), args) + db.field_types(def)[fd].get().instantiate(DbInterner::new_no_crate(db), args) } fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar { diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs index 1e2c3aa31b8d..d2495917187e 100644 --- a/crates/hir-ty/src/layout/adt.rs +++ b/crates/hir-ty/src/layout/adt.rs @@ -13,17 +13,17 @@ use smallvec::SmallVec; use triomphe::Arc; use crate::{ - ParamEnvAndCrate, db::HirDatabase, layout::{Layout, LayoutCx, LayoutError, field_ty}, - next_solver::GenericArgs, + next_solver::StoredGenericArgs, + traits::StoredParamEnvAndCrate, }; -pub fn layout_of_adt_query<'db>( - db: &'db dyn HirDatabase, +pub fn layout_of_adt_query( + db: &dyn HirDatabase, def: AdtId, - args: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, + args: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError> { let krate = trait_env.krate; let Ok(target) = db.target_data_layout(krate) else { @@ -34,7 +34,9 @@ pub fn layout_of_adt_query<'db>( let handle_variant = |def: VariantId, var: &VariantFields| { var.fields() .iter() - .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &args), trait_env)) + .map(|(fd, _)| { + db.layout_of_ty(field_ty(db, def, fd, args.as_ref()).store(), trait_env.clone()) + }) .collect::, _>>() }; let (variants, repr, is_special_no_niche) = match def { @@ -95,12 +97,12 @@ pub fn layout_of_adt_query<'db>( Ok(Arc::new(result)) } -pub(crate) fn layout_of_adt_cycle_result<'db>( - _: &'db dyn HirDatabase, +pub(crate) fn layout_of_adt_cycle_result( + _: &dyn HirDatabase, _: salsa::Id, _def: AdtId, - _args: GenericArgs<'db>, - _trait_env: ParamEnvAndCrate<'db>, + _args: StoredGenericArgs, + _trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index be6a76478a11..8c91be1d7811 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -98,7 +98,7 @@ fn eval_goal( Either::Left(it) => it.krate(&db), 
Either::Right(it) => it.krate(&db), }; - db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }) + db.layout_of_ty(goal_ty.store(), ParamEnvAndCrate { param_env, krate }.store()) }) } @@ -140,10 +140,10 @@ fn eval_expr( .unwrap() .0; let infer = InferenceResult::for_body(&db, function_id.into()); - let goal_ty = infer.type_of_binding[b]; + let goal_ty = infer.type_of_binding[b].clone(); let param_env = db.trait_environment(function_id.into()); let krate = function_id.krate(&db); - db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }) + db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }.store()) }) } diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 5ebe87c5d5d6..012fd8a10abd 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -61,11 +61,12 @@ use hir_def::{CallableDefId, TypeOrConstParamId, type_ref::Rawness}; use hir_expand::name::Name; use indexmap::{IndexMap, map::Entry}; use intern::{Symbol, sym}; +use macros::GenericTypeVisitable; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; use rustc_type_ir::{ BoundVarIndexKind, TypeSuperVisitable, TypeVisitableExt, UpcastFrom, - inherent::{IntoKind, SliceLike, Ty as _}, + inherent::{IntoKind, Ty as _}, }; use syntax::ast::{ConstArg, make}; use traits::FnTrait; @@ -104,7 +105,7 @@ pub use utils::{ /// A constant can have reference to other things. Memory map job is holding /// the necessary bits of memory of the const eval session to keep the constant /// meaningful. -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)] pub enum MemoryMap<'db> { #[default] Empty, @@ -112,7 +113,7 @@ pub enum MemoryMap<'db> { Complex(Box>), } -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)] pub struct ComplexMemoryMap<'db> { memory: IndexMap, FxBuildHasher>, vtable: VTableMap<'db>, @@ -134,7 +135,7 @@ impl ComplexMemoryMap<'_> { } impl<'db> MemoryMap<'db> { - pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError<'db>> { + pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError> { match self { MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)), MemoryMap::Complex(cm) => cm.vtable.ty(id), @@ -150,8 +151,8 @@ impl<'db> MemoryMap<'db> { /// allocator function as `f` and it will return a mapping of old addresses to new addresses. fn transform_addresses( &self, - mut f: impl FnMut(&[u8], usize) -> Result>, - ) -> Result, MirEvalError<'db>> { + mut f: impl FnMut(&[u8], usize) -> Result, + ) -> Result, MirEvalError> { let mut transform = |(addr, val): (&usize, &[u8])| { let addr = *addr; let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; @@ -333,9 +334,9 @@ impl FnAbi { } #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub enum ImplTraitId<'db> { - ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx<'db>), - TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx<'db>), +pub enum ImplTraitId { + ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx), + TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx), } /// 'Canonicalizes' the `t` by replacing any errors with new variables. 
Also diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 3f901ad69285..50f808affa99 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -15,8 +15,8 @@ use either::Either; use hir_def::{ AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, - LifetimeParamId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, - TypeParamId, UnionId, VariantId, + LifetimeParamId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, + TypeOrConstParamId, TypeParamId, UnionId, VariantId, builtin_type::BuiltinType, expr_store::{ExpressionStore, HygieneId, path::Path}, hir::generics::{ @@ -61,36 +61,38 @@ use crate::{ AliasTy, Binder, BoundExistentialPredicates, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, FxIndexMap, GenericArg, GenericArgs, ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, - TraitPredicate, TraitRef, Ty, Tys, UnevaluatedConst, abi::Safety, util::BottomUpFolder, + StoredClauses, StoredEarlyBinder, StoredGenericArg, StoredGenericArgs, StoredPolyFnSig, + StoredTy, TraitPredicate, TraitRef, Ty, Tys, UnevaluatedConst, abi::Safety, + util::BottomUpFolder, }, }; pub(crate) struct PathDiagnosticCallbackData(pub(crate) TypeRefId); #[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTraits<'db> { - pub(crate) impl_traits: Arena>, +pub struct ImplTraits { + pub(crate) impl_traits: Arena, } #[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTrait<'db> { - pub(crate) predicates: Box<[Clause<'db>]>, +pub struct ImplTrait { + pub(crate) predicates: StoredClauses, } -pub type ImplTraitIdx<'db> = Idx>; +pub type ImplTraitIdx = Idx; #[derive(Debug, Default)] -struct ImplTraitLoweringState<'db> { +struct ImplTraitLoweringState { /// When turning `impl Trait` into opaque types, we have to collect the /// bounds at the same time to get the IDs correct (without becoming too /// complicated). mode: ImplTraitLoweringMode, // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. - opaque_type_data: Arena>, + opaque_type_data: Arena, } -impl<'db> ImplTraitLoweringState<'db> { - fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState<'db> { +impl ImplTraitLoweringState { + fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState { Self { mode, opaque_type_data: Arena::new() } } } @@ -174,7 +176,7 @@ pub struct TyLoweringContext<'db, 'a> { def: GenericDefId, generics: OnceCell, in_binders: DebruijnIndex, - impl_trait_mode: ImplTraitLoweringState<'db>, + impl_trait_mode: ImplTraitLoweringState, /// Tracks types with explicit `?Sized` bounds. 
pub(crate) unsized_types: FxHashSet>, pub(crate) diagnostics: Vec, @@ -337,7 +339,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } } Some(ValueNs::ConstId(c)) => { - let args = GenericArgs::new_from_iter(self.interner, []); + let args = GenericArgs::empty(self.interner); Some(Const::new( self.interner, rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( @@ -476,7 +478,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let idx = self .impl_trait_mode .opaque_type_data - .alloc(ImplTrait { predicates: Box::default() }); + .alloc(ImplTrait { predicates: Clauses::empty(interner).store() }); let impl_trait_id = origin.either( |f| ImplTraitId::ReturnTypeImplTrait(f, idx), @@ -989,7 +991,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } } - fn lower_impl_trait(&mut self, def_id: SolverDefId, bounds: &[TypeBound]) -> ImplTrait<'db> { + fn lower_impl_trait(&mut self, def_id: SolverDefId, bounds: &[TypeBound]) -> ImplTrait { let interner = self.interner; cov_mark::hit!(lower_rpit); let args = GenericArgs::identity_for_item(interner, def_id); @@ -1010,7 +1012,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let trait_ref = TraitRef::new_from_args( interner, trait_id.into(), - GenericArgs::new_from_iter(interner, [self_ty.into()]), + GenericArgs::new_from_slice(&[self_ty.into()]), ); Clause(Predicate::new( interner, @@ -1024,9 +1026,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { }); predicates.extend(sized_clause); } - predicates.into_boxed_slice() + predicates }); - ImplTrait { predicates } + ImplTrait { predicates: Clauses::new_from_slice(&predicates).store() } } pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> { @@ -1090,28 +1092,50 @@ pub(crate) fn impl_trait_query<'db>( db.impl_trait_with_diagnostics(impl_id).map(|it| it.0) } -pub(crate) fn impl_trait_with_diagnostics_query<'db>( +pub(crate) fn impl_trait_with_diagnostics<'db>( db: &'db dyn HirDatabase, impl_id: ImplId, ) -> Option<(EarlyBinder<'db, TraitRef<'db>>, Diagnostics)> { - let impl_data = db.impl_signature(impl_id); - let resolver = impl_id.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &impl_data.store, - impl_id.into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, - ); - let self_ty = db.impl_self_ty(impl_id).skip_binder(); - let target_trait = impl_data.target_trait.as_ref()?; - let trait_ref = EarlyBinder::bind(ctx.lower_trait_ref(target_trait, self_ty)?); - Some((trait_ref, create_diagnostics(ctx.diagnostics))) + return impl_trait_with_diagnostics_query(db, impl_id).as_ref().map(|(binder, diags)| { + ( + binder.get_with(|(trait_id, args)| { + TraitRef::new_from_args( + DbInterner::new_no_crate(db), + (*trait_id).into(), + args.as_ref(), + ) + }), + diags.clone(), + ) + }); + + #[salsa::tracked(returns(ref))] + pub(crate) fn impl_trait_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, + ) -> Option<(StoredEarlyBinder<(TraitId, StoredGenericArgs)>, Diagnostics)> { + let impl_data = db.impl_signature(impl_id); + let resolver = impl_id.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let self_ty = db.impl_self_ty(impl_id).skip_binder(); + let target_trait = impl_data.target_trait.as_ref()?; + let trait_ref = ctx.lower_trait_ref(target_trait, self_ty)?; + Some(( + StoredEarlyBinder::bind((trait_ref.def_id.0, trait_ref.args.store())), + create_diagnostics(ctx.diagnostics), + )) 
+ } } -impl<'db> ImplTraitId<'db> { +impl ImplTraitId { #[inline] - pub fn predicates(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { + pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { let (impl_traits, idx) = match self { ImplTraitId::ReturnTypeImplTrait(owner, idx) => { (ImplTraits::return_type_impl_traits(db, owner), idx) @@ -1123,8 +1147,7 @@ impl<'db> ImplTraitId<'db> { impl_traits .as_deref() .expect("owner should have opaque type") - .as_ref() - .map_bound(|it| &*it.impl_traits[idx].predicates) + .get_with(|it| it.impl_traits[idx].predicates.as_ref().as_slice()) } } @@ -1136,12 +1159,12 @@ impl InternedOpaqueTyId { } #[salsa::tracked] -impl<'db> ImplTraits<'db> { - #[salsa::tracked(returns(ref), unsafe(non_update_types))] +impl ImplTraits { + #[salsa::tracked(returns(ref))] pub(crate) fn return_type_impl_traits( - db: &'db dyn HirDatabase, + db: &dyn HirDatabase, def: hir_def::FunctionId, - ) -> Option>>> { + ) -> Option>> { // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe let data = db.function_signature(def); let resolver = def.resolver(db); @@ -1162,15 +1185,15 @@ impl<'db> ImplTraits<'db> { None } else { return_type_impl_traits.impl_traits.shrink_to_fit(); - Some(Box::new(EarlyBinder::bind(return_type_impl_traits))) + Some(Box::new(StoredEarlyBinder::bind(return_type_impl_traits))) } } - #[salsa::tracked(returns(ref), unsafe(non_update_types))] + #[salsa::tracked(returns(ref))] pub(crate) fn type_alias_impl_traits( - db: &'db dyn HirDatabase, + db: &dyn HirDatabase, def: hir_def::TypeAliasId, - ) -> Option>>> { + ) -> Option>> { let data = db.type_alias_signature(def); let resolver = def.resolver(db); let mut ctx = TyLoweringContext::new( @@ -1190,7 +1213,7 @@ impl<'db> ImplTraits<'db> { None } else { type_alias_impl_traits.impl_traits.shrink_to_fit(); - Some(Box::new(EarlyBinder::bind(type_alias_impl_traits))) + Some(Box::new(StoredEarlyBinder::bind(type_alias_impl_traits))) } } } @@ -1246,17 +1269,20 @@ pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBind /// Build the declared type of a function. This should not need to look at the /// function body. -fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> StoredEarlyBinder { let interner = DbInterner::new_no_crate(db); - EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::FunctionId(def).into(), - GenericArgs::identity_for_item(interner, def.into()), - )) + StoredEarlyBinder::bind( + Ty::new_fn_def( + interner, + CallableDefId::FunctionId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + ) + .store(), + ) } /// Build the declared type of a const. -fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> StoredEarlyBinder { let resolver = def.resolver(db); let data = db.const_signature(def); let parent = def.loc(db).container; @@ -1268,11 +1294,11 @@ fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'d LifetimeElisionKind::AnonymousReportError, ); ctx.set_lifetime_elision(LifetimeElisionKind::for_const(ctx.interner, parent)); - EarlyBinder::bind(ctx.lower_ty(data.type_ref)) + StoredEarlyBinder::bind(ctx.lower_ty(data.type_ref).store()) } /// Build the declared type of a static. 
-fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> StoredEarlyBinder { let resolver = def.resolver(db); let data = db.static_signature(def); let mut ctx = TyLoweringContext::new( @@ -1283,100 +1309,129 @@ fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder< LifetimeElisionKind::AnonymousReportError, ); ctx.set_lifetime_elision(LifetimeElisionKind::Elided(Region::new_static(ctx.interner))); - EarlyBinder::bind(ctx.lower_ty(data.type_ref)) + StoredEarlyBinder::bind(ctx.lower_ty(data.type_ref).store()) } /// Build the type of a tuple struct constructor. -fn type_for_struct_constructor<'db>( - db: &'db dyn HirDatabase, +fn type_for_struct_constructor( + db: &dyn HirDatabase, def: StructId, -) -> Option>> { +) -> Option> { let struct_data = def.fields(db); match struct_data.shape { FieldsShape::Record => None, FieldsShape::Unit => Some(type_for_adt(db, def.into())), FieldsShape::Tuple => { let interner = DbInterner::new_no_crate(db); - Some(EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::StructId(def).into(), - GenericArgs::identity_for_item(interner, def.into()), - ))) + Some(StoredEarlyBinder::bind( + Ty::new_fn_def( + interner, + CallableDefId::StructId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + ) + .store(), + )) } } } /// Build the type of a tuple enum variant constructor. -fn type_for_enum_variant_constructor<'db>( - db: &'db dyn HirDatabase, +fn type_for_enum_variant_constructor( + db: &dyn HirDatabase, def: EnumVariantId, -) -> Option>> { +) -> Option> { let struct_data = def.fields(db); match struct_data.shape { FieldsShape::Record => None, FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())), FieldsShape::Tuple => { let interner = DbInterner::new_no_crate(db); - Some(EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::EnumVariantId(def).into(), - GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), - ))) + Some(StoredEarlyBinder::bind( + Ty::new_fn_def( + interner, + CallableDefId::EnumVariantId(def).into(), + GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), + ) + .store(), + )) } } } -pub(crate) fn value_ty_query<'db>( +pub(crate) fn value_ty<'db>( db: &'db dyn HirDatabase, def: ValueTyDefId, ) -> Option>> { - match def { - ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), - ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), - ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), - ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), - ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), - ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), + return value_ty_query(db, def).as_ref().map(|it| it.get()); + + #[salsa::tracked(returns(ref))] + pub(crate) fn value_ty_query<'db>( + db: &'db dyn HirDatabase, + def: ValueTyDefId, + ) -> Option> { + match def { + ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), + ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), + ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), + ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), + ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), + ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), + } } } -pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( +pub(crate) fn type_for_type_alias_with_diagnostics<'db>( 
db: &'db dyn HirDatabase, t: TypeAliasId, ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - let type_alias_data = db.type_alias_signature(t); - let mut diags = None; - let resolver = t.resolver(db); - let interner = DbInterner::new_no_crate(db); - let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { - EarlyBinder::bind(Ty::new_foreign(interner, t.into())) - } else { - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - t.into(), - LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - let res = EarlyBinder::bind( - type_alias_data - .ty - .map(|type_ref| ctx.lower_ty(type_ref)) - .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)), - ); - diags = create_diagnostics(ctx.diagnostics); - res - }; - (inner, diags) -} + let (ty, diags) = type_for_type_alias_with_diagnostics_query(db, t); + return (ty.get(), diags.clone()); -pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>( - db: &'db dyn HirDatabase, - _: salsa::Id, - _adt: TypeAliasId, -) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - (EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None) + #[salsa::tracked(returns(ref), cycle_result = type_for_type_alias_with_diagnostics_cycle_result)] + pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + t: TypeAliasId, + ) -> (StoredEarlyBinder, Diagnostics) { + let type_alias_data = db.type_alias_signature(t); + let mut diags = None; + let resolver = t.resolver(db); + let interner = DbInterner::new_no_crate(db); + let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { + StoredEarlyBinder::bind(Ty::new_foreign(interner, t.into()).store()) + } else { + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + t.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + let res = StoredEarlyBinder::bind( + type_alias_data + .ty + .map(|type_ref| ctx.lower_ty(type_ref)) + .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)) + .store(), + ); + diags = create_diagnostics(ctx.diagnostics); + res + }; + (inner, diags) + } + + pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, + _adt: TypeAliasId, + ) -> (StoredEarlyBinder, Diagnostics) { + ( + StoredEarlyBinder::bind( + Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed).store(), + ), + None, + ) + } } pub(crate) fn impl_self_ty_query<'db>( @@ -1386,31 +1441,45 @@ pub(crate) fn impl_self_ty_query<'db>( db.impl_self_ty_with_diagnostics(impl_id).0 } -pub(crate) fn impl_self_ty_with_diagnostics_query<'db>( +pub(crate) fn impl_self_ty_with_diagnostics<'db>( db: &'db dyn HirDatabase, impl_id: ImplId, ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - let resolver = impl_id.resolver(db); + let (ty, diags) = impl_self_ty_with_diagnostics_query(db, impl_id); + return (ty.get(), diags.clone()); - let impl_data = db.impl_signature(impl_id); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &impl_data.store, - impl_id.into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, - ); - let ty = ctx.lower_ty(impl_data.self_ty); - assert!(!ty.has_escaping_bound_vars()); - (EarlyBinder::bind(ty), create_diagnostics(ctx.diagnostics)) -} + #[salsa::tracked(returns(ref), cycle_result = impl_self_ty_with_diagnostics_cycle_result)] + pub(crate) fn 
impl_self_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, + ) -> (StoredEarlyBinder, Diagnostics) { + let resolver = impl_id.resolver(db); -pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( - db: &dyn HirDatabase, - _: salsa::Id, - _impl_id: ImplId, -) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) { - (EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None) + let impl_data = db.impl_signature(impl_id); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let ty = ctx.lower_ty(impl_data.self_ty); + assert!(!ty.has_escaping_bound_vars()); + (StoredEarlyBinder::bind(ty.store()), create_diagnostics(ctx.diagnostics)) + } + + pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, + _impl_id: ImplId, + ) -> (StoredEarlyBinder, Diagnostics) { + ( + StoredEarlyBinder::bind( + Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed).store(), + ), + None, + ) + } } pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> { @@ -1418,57 +1487,69 @@ pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstPara } // returns None if def is a type arg -pub(crate) fn const_param_ty_with_diagnostics_query<'db>( +pub(crate) fn const_param_ty_with_diagnostics<'db>( db: &'db dyn HirDatabase, def: ConstParamId, ) -> (Ty<'db>, Diagnostics) { - let (parent_data, store) = db.generic_params_and_store(def.parent()); - let data = &parent_data[def.local_id()]; - let resolver = def.parent().resolver(db); - let interner = DbInterner::new_no_crate(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &store, - def.parent(), - LifetimeElisionKind::AnonymousReportError, - ); - let ty = match data { - TypeOrConstParamData::TypeParamData(_) => { - never!(); - Ty::new_error(interner, ErrorGuaranteed) - } - TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), - }; - (ty, create_diagnostics(ctx.diagnostics)) -} + let (ty, diags) = const_param_ty_with_diagnostics_query(db, (), def); + return (ty.as_ref(), diags.clone()); -pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>( - db: &'db dyn HirDatabase, - _: salsa::Id, - _: crate::db::HirDatabaseData, - _def: ConstParamId, -) -> (Ty<'db>, Diagnostics) { - let interner = DbInterner::new_no_crate(db); - (Ty::new_error(interner, ErrorGuaranteed), None) + // FIXME: Make this query non-interned. 
+ #[salsa::tracked(returns(ref), cycle_result = const_param_ty_with_diagnostics_cycle_result)] + pub(crate) fn const_param_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + _: (), + def: ConstParamId, + ) -> (StoredTy, Diagnostics) { + let (parent_data, store) = db.generic_params_and_store(def.parent()); + let data = &parent_data[def.local_id()]; + let resolver = def.parent().resolver(db); + let interner = DbInterner::new_no_crate(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &store, + def.parent(), + LifetimeElisionKind::AnonymousReportError, + ); + let ty = match data { + TypeOrConstParamData::TypeParamData(_) => { + never!(); + Ty::new_error(interner, ErrorGuaranteed) + } + TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), + }; + (ty.store(), create_diagnostics(ctx.diagnostics)) + } + + pub(crate) fn const_param_ty_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, + _: (), + _def: ConstParamId, + ) -> (StoredTy, Diagnostics) { + let interner = DbInterner::new_no_crate(db); + (Ty::new_error(interner, ErrorGuaranteed).store(), None) + } } -pub(crate) fn field_types_query<'db>( - db: &'db dyn HirDatabase, +pub(crate) fn field_types_query( + db: &dyn HirDatabase, variant_id: VariantId, -) -> Arc>>> { - db.field_types_with_diagnostics(variant_id).0 +) -> &ArenaMap> { + &db.field_types_with_diagnostics(variant_id).0 } /// Build the type of all specific fields of a struct or enum variant. +#[salsa::tracked(returns(ref))] pub(crate) fn field_types_with_diagnostics_query<'db>( db: &'db dyn HirDatabase, variant_id: VariantId, -) -> (Arc>>>, Diagnostics) { +) -> (ArenaMap>, Diagnostics) { let var_data = variant_id.fields(db); let fields = var_data.fields(); if fields.is_empty() { - return (Arc::new(ArenaMap::default()), None); + return (ArenaMap::default(), None); } let (resolver, def): (_, GenericDefId) = match variant_id { @@ -1485,9 +1566,9 @@ pub(crate) fn field_types_with_diagnostics_query<'db>( LifetimeElisionKind::AnonymousReportError, ); for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, EarlyBinder::bind(ctx.lower_ty(field_data.type_ref))); + res.insert(field_id, StoredEarlyBinder::bind(ctx.lower_ty(field_data.type_ref).store())); } - (Arc::new(res), create_diagnostics(ctx.diagnostics)) + (res, create_diagnostics(ctx.diagnostics)) } /// This query exists only to be used when resolving short-hand associated types @@ -1499,13 +1580,13 @@ pub(crate) fn field_types_with_diagnostics_query<'db>( /// following bounds are disallowed: `T: Foo, U: Foo`, but /// these are fine: `T: Foo, U: Foo<()>`. 
#[tracing::instrument(skip(db), ret)] -#[salsa::tracked(returns(ref), unsafe(non_update_types), cycle_result = generic_predicates_for_param_cycle_result)] +#[salsa::tracked(returns(ref), cycle_result = generic_predicates_for_param_cycle_result)] pub(crate) fn generic_predicates_for_param<'db>( db: &'db dyn HirDatabase, def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option, -) -> EarlyBinder<'db, Box<[Clause<'db>]>> { +) -> StoredEarlyBinder { let generics = generics(db, def); let interner = DbInterner::new_no_crate(db); let resolver = def.resolver(db); @@ -1607,17 +1688,17 @@ pub(crate) fn generic_predicates_for_param<'db>( predicates.extend(implicitly_sized_predicates); }; } - EarlyBinder::bind(predicates.into_boxed_slice()) + StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()) } -pub(crate) fn generic_predicates_for_param_cycle_result<'db>( - _db: &'db dyn HirDatabase, +pub(crate) fn generic_predicates_for_param_cycle_result( + db: &dyn HirDatabase, _: salsa::Id, _def: GenericDefId, _param_id: TypeOrConstParamId, _assoc_name: Option, -) -> EarlyBinder<'db, Box<[Clause<'db>]>> { - EarlyBinder::bind(Box::new([])) +) -> StoredEarlyBinder { + StoredEarlyBinder::bind(Clauses::empty(DbInterner::new_no_crate(db)).store()) } #[inline] @@ -1625,84 +1706,95 @@ pub(crate) fn type_alias_bounds<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { - type_alias_bounds_with_diagnostics(db, type_alias).0.as_ref().map_bound(|it| &**it) + type_alias_bounds_with_diagnostics(db, type_alias).0.map_bound(|it| it.as_slice()) } -#[salsa::tracked(returns(ref), unsafe(non_update_types))] -pub fn type_alias_bounds_with_diagnostics<'db>( +pub(crate) fn type_alias_bounds_with_diagnostics<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, -) -> (EarlyBinder<'db, Box<[Clause<'db>]>>, Diagnostics) { - let type_alias_data = db.type_alias_signature(type_alias); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - type_alias.into(), - LifetimeElisionKind::AnonymousReportError, - ); - let interner = ctx.interner; - let def_id = type_alias.into(); +) -> (EarlyBinder<'db, Clauses<'db>>, Diagnostics) { + let (bounds, diags) = type_alias_bounds_with_diagnostics_query(db, type_alias); + return (bounds.get(), diags.clone()); - let item_args = GenericArgs::identity_for_item(interner, def_id); - let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); + #[salsa::tracked(returns(ref))] + pub fn type_alias_bounds_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, + ) -> (StoredEarlyBinder, Diagnostics) { + let type_alias_data = db.type_alias_signature(type_alias); + let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + type_alias.into(), + LifetimeElisionKind::AnonymousReportError, + ); + let interner = ctx.interner; + let def_id = type_alias.into(); - let mut bounds = Vec::new(); - for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { - bounds.push(pred); - }); - } + let item_args = GenericArgs::identity_for_item(interner, def_id); + let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); - if !ctx.unsized_types.contains(&interner_ty) { - let sized_trait = ctx.lang_items.Sized; - if let Some(sized_trait) = 
sized_trait { - let trait_ref = TraitRef::new_from_args( - interner, - sized_trait.into(), - GenericArgs::new_from_iter(interner, [interner_ty.into()]), - ); - bounds.push(trait_ref.upcast(interner)); - }; - } + let mut bounds = Vec::new(); + for bound in &type_alias_data.bounds { + ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { + bounds.push(pred); + }); + } - (EarlyBinder::bind(bounds.into_boxed_slice()), create_diagnostics(ctx.diagnostics)) + if !ctx.unsized_types.contains(&interner_ty) { + let sized_trait = ctx.lang_items.Sized; + if let Some(sized_trait) = sized_trait { + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_slice(&[interner_ty.into()]), + ); + bounds.push(trait_ref.upcast(interner)); + }; + } + + ( + StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()), + create_diagnostics(ctx.diagnostics), + ) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericPredicates<'db> { +pub struct GenericPredicates { // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the // parent. Then come the explicit predicates for the parent, then the explicit trait predicate for the child, // then the implicit trait predicate for the child, if `is_trait` is `true`. - predicates: EarlyBinder<'db, Box<[Clause<'db>]>>, + predicates: StoredEarlyBinder, own_predicates_start: u32, is_trait: bool, parent_is_trait: bool, } #[salsa::tracked] -impl<'db> GenericPredicates<'db> { +impl<'db> GenericPredicates { /// Resolve the where clause(s) of an item with generics. /// /// Diagnostics are computed only for this item's predicates, not for parents. - #[salsa::tracked(returns(ref), unsafe(non_update_types))] + #[salsa::tracked(returns(ref))] pub fn query_with_diagnostics( db: &'db dyn HirDatabase, def: GenericDefId, - ) -> (GenericPredicates<'db>, Diagnostics) { + ) -> (GenericPredicates, Diagnostics) { generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true) } } -impl<'db> GenericPredicates<'db> { +impl GenericPredicates { #[inline] - pub fn query(db: &'db dyn HirDatabase, def: GenericDefId) -> &'db GenericPredicates<'db> { + pub fn query(db: &dyn HirDatabase, def: GenericDefId) -> &GenericPredicates { &Self::query_with_diagnostics(db, def).0 } #[inline] - pub fn query_all( + pub fn query_all<'db>( db: &'db dyn HirDatabase, def: GenericDefId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { @@ -1710,7 +1802,7 @@ impl<'db> GenericPredicates<'db> { } #[inline] - pub fn query_own( + pub fn query_own<'db>( db: &'db dyn HirDatabase, def: GenericDefId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { @@ -1718,7 +1810,7 @@ impl<'db> GenericPredicates<'db> { } #[inline] - pub fn query_explicit( + pub fn query_explicit<'db>( db: &'db dyn HirDatabase, def: GenericDefId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { @@ -1726,20 +1818,20 @@ impl<'db> GenericPredicates<'db> { } #[inline] - pub fn all_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { - self.predicates.as_ref().map_bound(|it| &**it) + pub fn all_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| it.as_slice()) } #[inline] - pub fn own_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { - self.predicates.as_ref().map_bound(|it| &it[self.own_predicates_start as usize..]) + pub fn own_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| &it.as_slice()[self.own_predicates_start as usize..]) } /// Returns the predicates, 
minus the implicit `Self: Trait` predicate for a trait. #[inline] - pub fn explicit_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { - self.predicates.as_ref().map_bound(|it| { - &it[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)] + pub fn explicit_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| { + &it.as_slice()[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)] }) } } @@ -1754,18 +1846,24 @@ pub(crate) fn trait_environment_for_body_query( db.trait_environment(def) } -pub(crate) fn trait_environment_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> ParamEnv<'db> { - let module = def.module(db); - let interner = DbInterner::new_with(db, module.krate(db)); - let predicates = GenericPredicates::query_all(db, def); - let clauses = rustc_type_ir::elaborate::elaborate(interner, predicates.iter_identity_copied()); - let clauses = Clauses::new_from_iter(interner, clauses); +pub(crate) fn trait_environment<'db>(db: &'db dyn HirDatabase, def: GenericDefId) -> ParamEnv<'db> { + return ParamEnv { clauses: trait_environment_query(db, def).as_ref() }; + + #[salsa::tracked(returns(ref))] + pub(crate) fn trait_environment_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> StoredClauses { + let module = def.module(db); + let interner = DbInterner::new_with(db, module.krate(db)); + let predicates = GenericPredicates::query_all(db, def); + let clauses = + rustc_type_ir::elaborate::elaborate(interner, predicates.iter_identity_copied()); + let clauses = Clauses::new_from_iter(interner, clauses); - // FIXME: We should normalize projections here, like rustc does. - ParamEnv { clauses } + // FIXME: We should normalize projections here, like rustc does. 
+ clauses.store() + } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -1777,12 +1875,12 @@ pub(crate) enum PredicateFilter { /// Resolve the where clause(s) of an item with generics, /// with a given filter #[tracing::instrument(skip(db, filter), ret)] -pub(crate) fn generic_predicates_filtered_by<'db, F>( - db: &'db dyn HirDatabase, +pub(crate) fn generic_predicates_filtered_by( + db: &dyn HirDatabase, def: GenericDefId, predicate_filter: PredicateFilter, filter: F, -) -> (GenericPredicates<'db>, Diagnostics) +) -> (GenericPredicates, Diagnostics) where F: Fn(GenericDefId) -> bool, { @@ -1856,7 +1954,7 @@ where let trait_ref = TraitRef::new_from_args( interner, sized_trait.into(), - GenericArgs::new_from_iter(interner, [param_ty.into()]), + GenericArgs::new_from_slice(&[param_ty.into()]), ); let clause = Clause(Predicate::new( interner, @@ -1899,7 +1997,7 @@ where own_predicates_start, is_trait, parent_is_trait, - predicates: EarlyBinder::bind(predicates.into_boxed_slice()), + predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()), }; return (predicates, diagnostics); @@ -1988,7 +2086,7 @@ fn implicitly_sized_clauses<'a, 'subst, 'db>( let trait_ref = TraitRef::new_from_args( interner, sized_trait.into(), - GenericArgs::new_from_iter(interner, [self_ty.into()]), + GenericArgs::new_from_slice(&[self_ty.into()]), ); Clause(Predicate::new( interner, @@ -2004,19 +2102,16 @@ fn implicitly_sized_clauses<'a, 'subst, 'db>( } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericDefaults<'db>(Option>>]>>); +pub struct GenericDefaults(Option>]>>); -impl<'db> GenericDefaults<'db> { +impl GenericDefaults { #[inline] - pub fn get(&self, idx: usize) -> Option>> { - self.0.as_ref()?[idx] + pub fn get<'db>(&self, idx: usize) -> Option>> { + Some(self.0.as_ref()?[idx].as_ref()?.get_with(|it| it.as_ref())) } } -pub(crate) fn generic_defaults_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> GenericDefaults<'_> { +pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> GenericDefaults { db.generic_defaults_with_diagnostics(def).0 } @@ -2026,7 +2121,7 @@ pub(crate) fn generic_defaults_query( pub(crate) fn generic_defaults_with_diagnostics_query( db: &dyn HirDatabase, def: GenericDefId, -) -> (GenericDefaults<'_>, Diagnostics) { +) -> (GenericDefaults, Diagnostics) { let generic_params = generics(db, def); if generic_params.is_empty() { return (GenericDefaults(None), None); @@ -2072,20 +2167,23 @@ pub(crate) fn generic_defaults_with_diagnostics_query( ctx: &mut TyLoweringContext<'db, '_>, idx: usize, p: GenericParamDataRef<'_>, - ) -> (Option>>, bool) { + ) -> (Option>, bool) { ctx.lowering_param_default(idx as u32); match p { GenericParamDataRef::TypeParamData(p) => { let ty = p.default.map(|ty| ctx.lower_ty(ty)); - (ty.map(|ty| EarlyBinder::bind(ty.into())), p.default.is_some()) + ( + ty.map(|ty| StoredEarlyBinder::bind(GenericArg::from(ty).store())), + p.default.is_some(), + ) } GenericParamDataRef::ConstParamData(p) => { let val = p.default.map(|c| { let param_ty = ctx.lower_ty(p.ty); let c = ctx.lower_const(c, param_ty); - c.into() + GenericArg::from(c).store() }); - (val.map(EarlyBinder::bind), p.default.is_some()) + (val.map(StoredEarlyBinder::bind), p.default.is_some()) } GenericParamDataRef::LifetimeParamData(_) => (None, false), } @@ -2096,26 +2194,31 @@ pub(crate) fn generic_defaults_with_diagnostics_cycle_result( _db: &dyn HirDatabase, _: salsa::Id, _def: GenericDefId, -) -> (GenericDefaults<'_>, Diagnostics) { +) -> 
(GenericDefaults, Diagnostics) { (GenericDefaults(None), None) } /// Build the signature of a callable item (function, struct or enum variant). -pub(crate) fn callable_item_signature_query<'db>( +pub(crate) fn callable_item_signature<'db>( db: &'db dyn HirDatabase, def: CallableDefId, ) -> EarlyBinder<'db, PolyFnSig<'db>> { - match def { - CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), - CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), - CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), + return callable_item_signature_query(db, def).get_with(|sig| sig.get()); + + #[salsa::tracked(returns(ref))] + pub(crate) fn callable_item_signature_query<'db>( + db: &'db dyn HirDatabase, + def: CallableDefId, + ) -> StoredEarlyBinder { + match def { + CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), + CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), + CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), + } } } -fn fn_sig_for_fn<'db>( - db: &'db dyn HirDatabase, - def: FunctionId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { +fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> StoredEarlyBinder { let data = db.function_signature(def); let resolver = def.resolver(db); let interner = DbInterner::new_no_crate(db); @@ -2145,56 +2248,56 @@ fn fn_sig_for_fn<'db>( let inputs_and_output = Tys::new_from_iter(interner, params.chain(Some(ret))); // If/when we track late bound vars, we need to switch this to not be `dummy` - EarlyBinder::bind(rustc_type_ir::Binder::dummy(FnSig { + StoredEarlyBinder::bind(StoredPolyFnSig::new(Binder::dummy(FnSig { abi: data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), c_variadic: data.is_varargs(), safety: if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, inputs_and_output, - })) + }))) } -fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> StoredEarlyBinder { let interner = DbInterner::new_no_crate(db); let args = GenericArgs::identity_for_item(interner, adt.into()); let ty = Ty::new_adt(interner, adt, args); - EarlyBinder::bind(ty) + StoredEarlyBinder::bind(ty.store()) } -fn fn_sig_for_struct_constructor<'db>( - db: &'db dyn HirDatabase, +fn fn_sig_for_struct_constructor( + db: &dyn HirDatabase, def: StructId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { +) -> StoredEarlyBinder { let field_tys = db.field_types(def.into()); - let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let params = field_tys.iter().map(|(_, ty)| ty.get().skip_binder()); let ret = type_for_adt(db, def.into()).skip_binder(); let inputs_and_output = - Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret))); - EarlyBinder::bind(Binder::dummy(FnSig { + Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret.as_ref()))); + StoredEarlyBinder::bind(StoredPolyFnSig::new(Binder::dummy(FnSig { abi: FnAbi::RustCall, c_variadic: false, safety: Safety::Safe, inputs_and_output, - })) + }))) } -fn fn_sig_for_enum_variant_constructor<'db>( - db: &'db dyn HirDatabase, +fn fn_sig_for_enum_variant_constructor( + db: &dyn HirDatabase, def: EnumVariantId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { +) -> StoredEarlyBinder { let field_tys = db.field_types(def.into()); - let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let params = field_tys.iter().map(|(_, ty)| ty.get().skip_binder()); let parent = def.lookup(db).parent; let ret = type_for_adt(db, 
parent.into()).skip_binder(); let inputs_and_output = - Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret))); - EarlyBinder::bind(Binder::dummy(FnSig { + Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret.as_ref()))); + StoredEarlyBinder::bind(StoredPolyFnSig::new(Binder::dummy(FnSig { abi: FnAbi::RustCall, c_variadic: false, safety: Safety::Safe, inputs_and_output, - })) + }))) } // FIXME(next-solver): should merge this with `explicit_item_bounds` in some way @@ -2341,7 +2444,7 @@ fn named_associated_type_shorthand_candidates<'db, R>( if let Some(alias) = check_trait(trait_ref) { return Some(alias); } - for pred in generic_predicates_filtered_by( + let predicates = generic_predicates_filtered_by( db, GenericDefId::TraitId(trait_ref.def_id.0), PredicateFilter::SelfTrait, @@ -2352,9 +2455,8 @@ fn named_associated_type_shorthand_candidates<'db, R>( |pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0), ) .0 - .predicates - .instantiate_identity() - { + .predicates; + for pred in predicates.get().instantiate_identity() { tracing::debug!(?pred); let sup_trait_ref = match pred.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(pred) => pred.trait_ref, @@ -2401,8 +2503,8 @@ fn named_associated_type_shorthand_candidates<'db, R>( let predicates = generic_predicates_for_param(db, def, param_id.into(), assoc_name.clone()); predicates - .as_ref() - .iter_identity_copied() + .get() + .iter_identity() .find_map(|pred| match pred.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate), _ => None, diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs index fe96b6832e08..fba176bcb1ab 100644 --- a/crates/hir-ty/src/lower/path.rs +++ b/crates/hir-ty/src/lower/path.rs @@ -20,7 +20,7 @@ use hir_def::{ use hir_expand::name::Name; use rustc_type_ir::{ AliasTerm, AliasTy, AliasTyKind, - inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _}, + inherent::{GenericArgs as _, Region as _, Ty as _}, }; use smallvec::SmallVec; use stdx::never; @@ -45,17 +45,15 @@ use super::{ const_param_ty_query, ty_query, }; -type CallbackData<'a, 'db> = Either< - PathDiagnosticCallbackData, - crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>, ->; +type CallbackData<'a> = + Either>; // We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box` // because of the allocation, so we create a lifetime-less callback, tailored for our needs. 
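The `PathDiagnosticCallback` defined just below follows the shape described in the comment above: the caller-specific state lives in a plain `data` field and is paired with a free `fn` pointer that receives that data by reference, so no boxed closure, no allocation, and no captured lifetime is involved. A minimal, self-contained sketch of the same pattern follows; `Emitter`, `Diag`, and `push_diag` are hypothetical names for illustration only, not the types added by this patch.

// Sketch of a lifetime-less callback: state + fn pointer instead of a boxed FnMut.
// All names here are hypothetical, for illustration only.

struct Diag(&'static str);

struct Emitter<'a> {
    // Caller-specific state the callback needs access to.
    data: &'a mut Vec<String>,
    // Plain function pointer: no allocation, no captured environment.
    callback: fn(&mut Vec<String>, Diag),
}

impl<'a> Emitter<'a> {
    fn emit(&mut self, diag: Diag) {
        (self.callback)(self.data, diag);
    }
}

fn push_diag(sink: &mut Vec<String>, diag: Diag) {
    sink.push(diag.0.to_string());
}

fn main() {
    let mut sink = Vec::new();
    {
        let mut emitter = Emitter { data: &mut sink, callback: push_diag };
        emitter.emit(Diag("unresolved path"));
    }
    assert_eq!(sink, ["unresolved path"]);
}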
pub(crate) struct PathDiagnosticCallback<'a, 'db> { - pub(crate) data: CallbackData<'a, 'db>, + pub(crate) data: CallbackData<'a>, pub(crate) callback: - fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), + fn(&CallbackData<'_>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), } pub(crate) struct PathLoweringContext<'a, 'b, 'db> { @@ -555,7 +553,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { ValueTyDefId::UnionId(it) => it.into(), ValueTyDefId::ConstId(it) => it.into(), ValueTyDefId::StaticId(_) => { - return GenericArgs::new_from_iter(interner, []); + return GenericArgs::empty(interner); } ValueTyDefId::EnumVariantId(var) => { // the generic args for an enum variant may be either specified diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index d66a386bcdeb..aaa3cce4fad9 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -26,7 +26,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ TypeVisitableExt, fast_reject::{TreatParams, simplify_type}, - inherent::{BoundExistentialPredicates, IntoKind, SliceLike}, + inherent::{BoundExistentialPredicates, IntoKind}, }; use stdx::impl_from; use triomphe::Arc; diff --git a/crates/hir-ty/src/method_resolution/confirm.rs b/crates/hir-ty/src/method_resolution/confirm.rs index 6d6515a45782..9292928f99fc 100644 --- a/crates/hir-ty/src/method_resolution/confirm.rs +++ b/crates/hir-ty/src/method_resolution/confirm.rs @@ -9,7 +9,7 @@ use hir_def::{ use rustc_type_ir::{ TypeFoldable, elaborate::elaborate, - inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _}, + inherent::{BoundExistentialPredicates, IntoKind, Ty as _}, }; use tracing::debug; @@ -45,7 +45,7 @@ struct ConfirmContext<'a, 'b, 'db> { pub(crate) struct ConfirmResult<'db> { pub(crate) callee: MethodCallee<'db>, pub(crate) illegal_sized_bound: bool, - pub(crate) adjustments: Box<[Adjustment<'db>]>, + pub(crate) adjustments: Box<[Adjustment]>, } impl<'a, 'db> InferenceContext<'a, 'db> { @@ -177,7 +177,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { &mut self, unadjusted_self_ty: Ty<'db>, pick: &probe::Pick<'db>, - ) -> (Ty<'db>, Box<[Adjustment<'db>]>) { + ) -> (Ty<'db>, Box<[Adjustment]>) { // Commit the autoderefs by calling `autoderef` again, but this // time writing the results into the various typeck results. let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty); @@ -200,8 +200,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { // for two-phase borrows. 
let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); - adjustments - .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target }); + adjustments.push(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), + target: target.store(), + }); if unsize { let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() { @@ -213,8 +215,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { ) }; target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into()); - adjustments - .push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target }); + adjustments.push(Adjustment { + kind: Adjust::Pointer(PointerCast::Unsize), + target: target.store(), + }); } } Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => { @@ -228,7 +232,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { adjustments.push(Adjustment { kind: Adjust::Pointer(PointerCast::MutToConstPointer), - target, + target: target.store(), }); } None => {} @@ -482,7 +486,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { if self.ctx.unstable_features.arbitrary_self_types { self.ctx.result.type_mismatches.get_or_insert_default().insert( self.expr.into(), - TypeMismatch { expected: method_self_ty, actual: self_ty }, + TypeMismatch { expected: method_self_ty.store(), actual: self_ty.store() }, ); } } diff --git a/crates/hir-ty/src/method_resolution/probe.rs b/crates/hir-ty/src/method_resolution/probe.rs index 9ea0d500a5f4..cdffcd9383ca 100644 --- a/crates/hir-ty/src/method_resolution/probe.rs +++ b/crates/hir-ty/src/method_resolution/probe.rs @@ -14,7 +14,7 @@ use rustc_type_ir::{ InferTy, TypeVisitableExt, Upcast, Variance, elaborate::{self, supertrait_def_ids}, fast_reject::{DeepRejectCtxt, TreatParams, simplify_type}, - inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _}, + inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, Ty as _}, }; use smallvec::{SmallVec, smallvec}; use tracing::{debug, instrument}; diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs index 836c20a43348..664238601108 100644 --- a/crates/hir-ty/src/mir.rs +++ b/crates/hir-ty/src/mir.rs @@ -12,7 +12,7 @@ use hir_def::{ use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_ast_ir::Mutability; use rustc_hash::FxHashMap; -use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, Ty as _}; use smallvec::{SmallVec, smallvec}; use stdx::{impl_from, never}; @@ -23,7 +23,8 @@ use crate::{ display::{DisplayTarget, HirDisplay}, infer::PointerCast, next_solver::{ - Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind, + Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, StoredConst, StoredGenericArgs, + StoredTy, Ty, TyKind, infer::{InferCtxt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -49,16 +50,16 @@ pub(crate) use monomorphization::monomorphized_mir_body_cycle_result; use super::consteval::try_const_usize; -pub type BasicBlockId<'db> = Idx>; -pub type LocalId<'db> = Idx>; +pub type BasicBlockId = Idx; +pub type LocalId = Idx; -fn return_slot<'db>() -> LocalId<'db> { +fn return_slot() -> LocalId { LocalId::from_raw(RawIdx::from(0)) } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct Local<'db> { - pub ty: Ty<'db>, +pub struct Local { + pub ty: StoredTy, } /// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of @@ -80,19 +81,19 @@ pub struct Local<'db> { /// currently implements it, but it seems like 
this may be something to check against in the /// validator. #[derive(Debug, PartialEq, Eq, Clone)] -pub struct Operand<'db> { - kind: OperandKind<'db>, +pub struct Operand { + kind: OperandKind, // FIXME : This should actually just be of type `MirSpan`. span: Option, } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum OperandKind<'db> { +pub enum OperandKind { /// Creates a value by loading the given place. /// /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there /// is no such requirement. - Copy(Place<'db>), + Copy(Place), /// Creates a value by performing loading the place, just like the `Copy` operand. /// @@ -101,21 +102,21 @@ pub enum OperandKind<'db> { /// place without first re-initializing it. /// /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188 - Move(Place<'db>), + Move(Place), /// Constants are already semantically values, and remain unchanged. - Constant { konst: Const<'db>, ty: Ty<'db> }, + Constant { konst: StoredConst, ty: StoredTy }, /// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc /// handles it with the `Constant` variant somehow. Static(StaticId), } -impl<'db> Operand<'db> { +impl<'db> Operand { fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'db>, ty: Ty<'db>) -> Self { let interner = DbInterner::conjure(); Operand { kind: OperandKind::Constant { - konst: Const::new_valtree(interner, ty, data, memory_map), - ty, + konst: Const::new_valtree(interner, ty, data, memory_map).store(), + ty: ty.store(), }, span: None, } @@ -125,7 +126,7 @@ impl<'db> Operand<'db> { Operand::from_concrete_const(data, MemoryMap::default(), ty) } - fn const_zst(ty: Ty<'db>) -> Operand<'db> { + fn const_zst(ty: Ty<'db>) -> Operand { Self::from_bytes(Box::default(), ty) } @@ -133,28 +134,28 @@ impl<'db> Operand<'db> { db: &'db dyn HirDatabase, func_id: hir_def::FunctionId, generic_args: GenericArgs<'db>, - ) -> Operand<'db> { + ) -> Operand { let interner = DbInterner::new_no_crate(db); let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args); Operand::from_bytes(Box::default(), ty) } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] -pub enum ProjectionElem<'db, V: PartialEq> { +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ProjectionElem { Deref, Field(Either), // FIXME: get rid of this, and use FieldId for tuples and closures ClosureField(usize), - Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V), + Index(V), ConstantIndex { offset: u64, from_end: bool }, Subslice { from: u64, to: u64 }, //Downcast(Option, VariantIdx), - OpaqueCast(Ty<'db>), + OpaqueCast(StoredTy), } -impl<'db, V: PartialEq> ProjectionElem<'db, V> { - pub fn projected_ty( +impl ProjectionElem { + pub fn projected_ty<'db>( &self, infcx: &InferCtxt<'db>, env: ParamEnv<'db>, @@ -194,7 +195,7 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> { }, ProjectionElem::Field(Either::Left(f)) => match base.kind() { TyKind::Adt(_, subst) => { - db.field_types(f.parent)[f.local_id].instantiate(interner, subst) + db.field_types(f.parent)[f.local_id].get().instantiate(interner, subst) } ty => { never!("Only adt has field, found {:?}", ty); @@ -253,18 +254,18 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> { } } -type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>; +type PlaceElem = ProjectionElem; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ProjectionId(u32); #[derive(Debug, Clone, PartialEq, Eq)] -pub struct 
ProjectionStore<'db> { - id_to_proj: FxHashMap]>>, - proj_to_id: FxHashMap]>, ProjectionId>, +pub struct ProjectionStore { + id_to_proj: FxHashMap>, + proj_to_id: FxHashMap, ProjectionId>, } -impl Default for ProjectionStore<'_> { +impl Default for ProjectionStore { fn default() -> Self { let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() }; // Ensure that [] will get the id 0 which is used in `ProjectionId::Empty` @@ -273,17 +274,17 @@ impl Default for ProjectionStore<'_> { } } -impl<'db> ProjectionStore<'db> { +impl ProjectionStore { pub fn shrink_to_fit(&mut self) { self.id_to_proj.shrink_to_fit(); self.proj_to_id.shrink_to_fit(); } - pub fn intern_if_exist(&self, projection: &[PlaceElem<'db>]) -> Option { + pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option { self.proj_to_id.get(projection).copied() } - pub fn intern(&mut self, projection: Box<[PlaceElem<'db>]>) -> ProjectionId { + pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId { let new_id = ProjectionId(self.proj_to_id.len() as u32); match self.proj_to_id.entry(projection) { Entry::Occupied(id) => *id.get(), @@ -304,15 +305,11 @@ impl ProjectionId { self == ProjectionId::EMPTY } - pub fn lookup<'a, 'db>(self, store: &'a ProjectionStore<'db>) -> &'a [PlaceElem<'db>] { + pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] { store.id_to_proj.get(&self).unwrap() } - pub fn project<'db>( - self, - projection: PlaceElem<'db>, - store: &mut ProjectionStore<'db>, - ) -> ProjectionId { + pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId { let mut current = self.lookup(store).to_vec(); current.push(projection); store.intern(current.into()) @@ -320,13 +317,13 @@ impl ProjectionId { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Place<'db> { - pub local: LocalId<'db>, +pub struct Place { + pub local: LocalId, pub projection: ProjectionId, } -impl<'db> Place<'db> { - fn is_parent(&self, child: &Place<'db>, store: &ProjectionStore<'db>) -> bool { +impl Place { + fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool { self.local == child.local && child.projection.lookup(store).starts_with(self.projection.lookup(store)) } @@ -334,39 +331,39 @@ impl<'db> Place<'db> { /// The place itself is not included fn iterate_over_parents<'a>( &'a self, - store: &'a ProjectionStore<'db>, - ) -> impl Iterator> + 'a { + store: &'a ProjectionStore, + ) -> impl Iterator + 'a { let projection = self.projection.lookup(store); (0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| { Some(Place { local: self.local, projection: store.intern_if_exist(x)? 
}) }) } - fn project(&self, projection: PlaceElem<'db>, store: &mut ProjectionStore<'db>) -> Place<'db> { + fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place { Place { local: self.local, projection: self.projection.project(projection, store) } } } -impl<'db> From> for Place<'db> { - fn from(local: LocalId<'db>) -> Self { +impl From for Place { + fn from(local: LocalId) -> Self { Self { local, projection: ProjectionId::EMPTY } } } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum AggregateKind<'db> { +pub enum AggregateKind { /// The type is of the element - Array(Ty<'db>), + Array(StoredTy), /// The type is of the tuple - Tuple(Ty<'db>), - Adt(VariantId, GenericArgs<'db>), + Tuple(StoredTy), + Adt(VariantId, StoredGenericArgs), Union(UnionId, FieldId), - Closure(Ty<'db>), + Closure(StoredTy), //Coroutine(LocalDefId, SubstsRef, Movability), } #[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub struct SwitchTargets<'db> { +pub struct SwitchTargets { /// Possible values. The locations to branch to in each case /// are found in the corresponding indices from the `targets` vector. values: SmallVec<[u128; 1]>, @@ -383,17 +380,17 @@ pub struct SwitchTargets<'db> { // // However we’ve decided to keep this as-is until we figure a case // where some other approach seems to be strictly better than other. - targets: SmallVec<[BasicBlockId<'db>; 2]>, + targets: SmallVec<[BasicBlockId; 2]>, } -impl<'db> SwitchTargets<'db> { +impl SwitchTargets { /// Creates switch targets from an iterator of values and target blocks. /// /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to /// `goto otherwise;`. pub fn new( - targets: impl Iterator)>, - otherwise: BasicBlockId<'db>, + targets: impl Iterator, + otherwise: BasicBlockId, ) -> Self { let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip(); targets.push(otherwise); @@ -402,12 +399,12 @@ impl<'db> SwitchTargets<'db> { /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`, /// and to `else_` if not. - pub fn static_if(value: u128, then: BasicBlockId<'db>, else_: BasicBlockId<'db>) -> Self { + pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self { Self { values: smallvec![value], targets: smallvec![then, else_] } } /// Returns the fallback target that is jumped to when none of the values match the operand. - pub fn otherwise(&self) -> BasicBlockId<'db> { + pub fn otherwise(&self) -> BasicBlockId { *self.targets.last().unwrap() } @@ -417,33 +414,33 @@ impl<'db> SwitchTargets<'db> { /// including the `otherwise` fallback target. /// /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory. - pub fn iter(&self) -> impl Iterator)> + '_ { + pub fn iter(&self) -> impl Iterator + '_ { iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y)) } /// Returns a slice with all possible jump targets (including the fallback target). - pub fn all_targets(&self) -> &[BasicBlockId<'db>] { + pub fn all_targets(&self) -> &[BasicBlockId] { &self.targets } /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the /// specific value. This cannot fail, as it'll return the `otherwise` /// branch if there's not a specific match for the value. 
- pub fn target_for_value(&self, value: u128) -> BasicBlockId<'db> { + pub fn target_for_value(&self, value: u128) -> BasicBlockId { self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise()) } } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct Terminator<'db> { +pub struct Terminator { pub span: MirSpan, - pub kind: TerminatorKind<'db>, + pub kind: TerminatorKind, } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum TerminatorKind<'db> { +pub enum TerminatorKind { /// Block has one successor; we continue execution there. - Goto { target: BasicBlockId<'db> }, + Goto { target: BasicBlockId }, /// Switches based on the computed value. /// @@ -455,9 +452,9 @@ pub enum TerminatorKind<'db> { /// Target values may not appear more than once. SwitchInt { /// The discriminant value being tested. - discr: Operand<'db>, + discr: Operand, - targets: SwitchTargets<'db>, + targets: SwitchTargets, }, /// Indicates that the landing pad is finished and that the process should continue unwinding. @@ -508,7 +505,7 @@ pub enum TerminatorKind<'db> { /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to /// > the place or one of its "parents" occurred more recently than a move out of it. This does not /// > consider indirect assignments. - Drop { place: Place<'db>, target: BasicBlockId<'db>, unwind: Option> }, + Drop { place: Place, target: BasicBlockId, unwind: Option }, /// Drops the place and assigns a new value to it. /// @@ -541,10 +538,10 @@ pub enum TerminatorKind<'db> { /// /// Disallowed after drop elaboration. DropAndReplace { - place: Place<'db>, - value: Operand<'db>, - target: BasicBlockId<'db>, - unwind: Option>, + place: Place, + value: Operand, + target: BasicBlockId, + unwind: Option, }, /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of @@ -559,18 +556,18 @@ pub enum TerminatorKind<'db> { /// [#71117]: https://github.com/rust-lang/rust/issues/71117 Call { /// The function that’s being called. - func: Operand<'db>, + func: Operand, /// Arguments the function is called with. /// These are owned by the callee, which is free to modify them. /// This allows the memory occupied by "by-value" arguments to be /// reused across function calls without duplicating the contents. - args: Box<[Operand<'db>]>, + args: Box<[Operand]>, /// Where the returned value will be written - destination: Place<'db>, + destination: Place, /// Where to go after this call returns. If none, the call necessarily diverges. - target: Option>, + target: Option, /// Cleanups to be done if the call unwinds. - cleanup: Option>, + cleanup: Option, /// `true` if this is from a call in HIR rather than from an overloaded /// operator. True for overloaded function call. from_hir_call: bool, @@ -586,11 +583,11 @@ pub enum TerminatorKind<'db> { /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the /// assertion does not fail, execution continues at the specified basic block. Assert { - cond: Operand<'db>, + cond: Operand, expected: bool, //msg: AssertMessage, - target: BasicBlockId<'db>, - cleanup: Option>, + target: BasicBlockId, + cleanup: Option, }, /// Marks a suspend point. @@ -607,13 +604,13 @@ pub enum TerminatorKind<'db> { /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`? Yield { /// The value to return. - value: Operand<'db>, + value: Operand, /// Where to resume to. 
-        resume: BasicBlockId<'db>,
+        resume: BasicBlockId,
         /// The place to store the resume argument in.
-        resume_arg: Place<'db>,
+        resume_arg: Place,
         /// Cleanup to be done if the coroutine is dropped at this suspend point.
-        drop: Option<BasicBlockId<'db>>,
+        drop: Option<BasicBlockId>,
     },

     /// Indicates the end of dropping a coroutine.
@@ -636,10 +633,10 @@ pub enum TerminatorKind<'db> {
     /// Disallowed after drop elaboration.
     FalseEdge {
         /// The target normal control flow will take.
-        real_target: BasicBlockId<'db>,
+        real_target: BasicBlockId,
         /// A block control flow could conceptually jump to, but won't in
         /// practice.
-        imaginary_target: BasicBlockId<'db>,
+        imaginary_target: BasicBlockId,
     },

     /// A terminator for blocks that only take one path in reality, but where we reserve the right
@@ -651,14 +648,14 @@ pub enum TerminatorKind<'db> {
     /// Disallowed after drop elaboration.
     FalseUnwind {
         /// The target normal control flow will take.
-        real_target: BasicBlockId<'db>,
+        real_target: BasicBlockId,
         /// The imaginary cleanup block link. This particular path will never be taken
         /// in practice, but in order to avoid fragility we want to always
         /// consider it in borrowck. We don't want to accept programs which
         /// pass borrowck only when `panic=abort` or some assertions are disabled
         /// due to release vs. debug mode builds. This needs to be an `Option` because
         /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
-        unwind: Option<BasicBlockId<'db>>,
+        unwind: Option<BasicBlockId>,
     },
 }

@@ -845,8 +842,8 @@ impl From for BinOp {
     }
 }

-impl<'db> From<Operand<'db>> for Rvalue<'db> {
-    fn from(x: Operand<'db>) -> Self {
+impl From<Operand> for Rvalue {
+    fn from(x: Operand) -> Self {
         Self::Use(x)
     }
 }

@@ -875,14 +872,14 @@ pub enum CastKind {
 }

 #[derive(Debug, PartialEq, Eq, Clone)]
-pub enum Rvalue<'db> {
+pub enum Rvalue {
     /// Yields the operand unchanged
-    Use(Operand<'db>),
+    Use(Operand),

     /// Creates an array where each element is the value of the operand.
     ///
     /// Corresponds to source code like `[x; 32]`.
-    Repeat(Operand<'db>, Const<'db>),
+    Repeat(Operand, StoredConst),

     /// Creates a reference of the indicated kind to the place.
     ///
@@ -891,7 +888,7 @@ pub enum Rvalue<'db> {
     /// exactly what the behavior of this operation should be.
     ///
     /// `Shallow` borrows are disallowed after drop lowering.
-    Ref(BorrowKind, Place<'db>),
+    Ref(BorrowKind, Place),

     /// Creates a pointer/reference to the given thread local.
     ///
@@ -922,7 +919,7 @@ pub enum Rvalue<'db> {
     /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
     /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
     /// ill-formed for places of other types.
-    Len(Place<'db>),
+    Len(Place),

     /// Performs essentially all of the casts that can be performed via `as`.
     ///
@@ -930,7 +927,7 @@ pub enum Rvalue<'db> {
     /// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
     /// `ArrayToPointer` and `MutToConstPointer` are special.
-    Cast(CastKind, Operand<'db>, Ty<'db>),
+    Cast(CastKind, Operand, StoredTy),

     // FIXME link to `pointer::offset` when it hits stable.
     /// * `Offset` has the same semantics as `pointer::offset`, except that the second
@@ -962,7 +959,7 @@ pub enum Rvalue<'db> {
     /// when the value of right-hand side is negative.
     ///
     /// Other combinations of types and operators are unsupported.
-    CheckedBinaryOp(BinOp, Operand<'db>, Operand<'db>),
+    CheckedBinaryOp(BinOp, Operand, Operand),

     /// Computes a value as described by the operation.
//NullaryOp(NullOp, Ty), @@ -973,7 +970,7 @@ pub enum Rvalue<'db> { /// Also does two's-complement arithmetic. Negation requires a signed integer or a float; /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds /// return a value with the same type as their operand. - UnaryOp(UnOp, Operand<'db>), + UnaryOp(UnOp, Operand), /// Computes the discriminant of the place, returning it as an integer of type /// `discriminant_ty`. Returns zero for types without discriminant. @@ -983,7 +980,7 @@ pub enum Rvalue<'db> { /// variant index; use `discriminant_for_variant` to convert. /// /// [#91095]: https://github.com/rust-lang/rust/issues/91095 - Discriminant(Place<'db>), + Discriminant(Place), /// Creates an aggregate value, like a tuple or struct. /// @@ -993,17 +990,17 @@ pub enum Rvalue<'db> { /// /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too. - Aggregate(AggregateKind<'db>, Box<[Operand<'db>]>), + Aggregate(AggregateKind, Box<[Operand]>), /// Transmutes a `*mut u8` into shallow-initialized `Box`. /// /// This is different from a normal transmute because dataflow analysis will treat the box as /// initialized but its content as uninitialized. Like other pointer casts, this in general /// affects alias analysis. - ShallowInitBox(Operand<'db>, Ty<'db>), + ShallowInitBox(Operand, StoredTy), /// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer. - ShallowInitBoxWithAlloc(Ty<'db>), + ShallowInitBoxWithAlloc(StoredTy), /// A CopyForDeref is equivalent to a read from a place at the /// codegen level, but is treated specially by drop elaboration. When such a read happens, it @@ -1013,41 +1010,41 @@ pub enum Rvalue<'db> { /// read never happened and just projects further. This allows simplifying various MIR /// optimizations and codegen backends that previously had to handle deref operations anywhere /// in a place. - CopyForDeref(Place<'db>), + CopyForDeref(Place), } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum StatementKind<'db> { - Assign(Place<'db>, Rvalue<'db>), - FakeRead(Place<'db>), +pub enum StatementKind { + Assign(Place, Rvalue), + FakeRead(Place), //SetDiscriminant { // place: Box, // variant_index: VariantIdx, //}, - Deinit(Place<'db>), - StorageLive(LocalId<'db>), - StorageDead(LocalId<'db>), + Deinit(Place), + StorageLive(LocalId), + StorageDead(LocalId), //Retag(RetagKind, Box), //AscribeUserType(Place, UserTypeProjection, Variance), //Intrinsic(Box), Nop, } -impl<'db> StatementKind<'db> { - fn with_span(self, span: MirSpan) -> Statement<'db> { +impl StatementKind { + fn with_span(self, span: MirSpan) -> Statement { Statement { kind: self, span } } } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct Statement<'db> { - pub kind: StatementKind<'db>, +pub struct Statement { + pub kind: StatementKind, pub span: MirSpan, } #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct BasicBlock<'db> { +pub struct BasicBlock { /// List of statements in this block. - pub statements: Vec>, + pub statements: Vec, /// Terminator for this block. /// @@ -1057,7 +1054,7 @@ pub struct BasicBlock<'db> { /// exception is that certain passes, such as `simplify_cfg`, swap /// out the terminator temporarily with `None` while they continue /// to recurse over the set of basic blocks. - pub terminator: Option>, + pub terminator: Option, /// If true, this block lies on an unwind path. 
This is used /// during codegen where distinct kinds of basic blocks may be @@ -1067,29 +1064,29 @@ pub struct BasicBlock<'db> { } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct MirBody<'db> { - pub projection_store: ProjectionStore<'db>, - pub basic_blocks: Arena>, - pub locals: Arena>, - pub start_block: BasicBlockId<'db>, +pub struct MirBody { + pub projection_store: ProjectionStore, + pub basic_blocks: Arena, + pub locals: Arena, + pub start_block: BasicBlockId, pub owner: DefWithBodyId, - pub binding_locals: ArenaMap>, - pub param_locals: Vec>, + pub binding_locals: ArenaMap, + pub param_locals: Vec, /// This field stores the closures directly owned by this body. It is used /// in traversing every mir body. pub closures: Vec, } -impl<'db> MirBody<'db> { - pub fn local_to_binding_map(&self) -> ArenaMap, BindingId> { +impl MirBody { + pub fn local_to_binding_map(&self) -> ArenaMap { self.binding_locals.iter().map(|(it, y)| (*y, it)).collect() } - fn walk_places(&mut self, mut f: impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>)) { - fn for_operand<'db>( - op: &mut Operand<'db>, - f: &mut impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>), - store: &mut ProjectionStore<'db>, + fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) { + fn for_operand( + op: &mut Operand, + f: &mut impl FnMut(&mut Place, &mut ProjectionStore), + store: &mut ProjectionStore, ) { match &mut op.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index b39c9bc06559..941b6c75bfe7 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -17,7 +17,7 @@ use crate::{ display::DisplayTarget, mir::OperandKind, next_solver::{ - DbInterner, GenericArgs, ParamEnv, Ty, TypingMode, + DbInterner, GenericArgs, ParamEnv, StoredTy, Ty, TypingMode, infer::{DbInternerInferExt, InferCtxt}, }, }; @@ -36,44 +36,44 @@ pub enum MutabilityReason { } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct MovedOutOfRef<'db> { - pub ty: Ty<'db>, +pub struct MovedOutOfRef { + pub ty: StoredTy, pub span: MirSpan, } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct PartiallyMoved<'db> { - pub ty: Ty<'db>, +pub struct PartiallyMoved { + pub ty: StoredTy, pub span: MirSpan, - pub local: LocalId<'db>, + pub local: LocalId, } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct BorrowRegion<'db> { - pub local: LocalId<'db>, +pub struct BorrowRegion { + pub local: LocalId, pub kind: BorrowKind, pub places: Vec, } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct BorrowckResult<'db> { - pub mir_body: Arc>, - pub mutability_of_locals: ArenaMap, MutabilityReason>, - pub moved_out_of_ref: Vec>, - pub partially_moved: Vec>, - pub borrow_regions: Vec>, +pub struct BorrowckResult { + pub mir_body: Arc, + pub mutability_of_locals: ArenaMap, + pub moved_out_of_ref: Vec, + pub partially_moved: Vec, + pub borrow_regions: Vec, } -fn all_mir_bodies<'db>( - db: &'db dyn HirDatabase, +fn all_mir_bodies( + db: &dyn HirDatabase, def: DefWithBodyId, - mut cb: impl FnMut(Arc>), -) -> Result<(), MirLowerError<'db>> { - fn for_closure<'db>( - db: &'db dyn HirDatabase, + mut cb: impl FnMut(Arc), +) -> Result<(), MirLowerError> { + fn for_closure( + db: &dyn HirDatabase, c: InternedClosureId, - cb: &mut impl FnMut(Arc>), - ) -> Result<(), MirLowerError<'db>> { + cb: &mut impl FnMut(Arc), + ) -> Result<(), MirLowerError> { match db.mir_body_for_closure(c) { Ok(body) => { cb(body.clone()); @@ -91,10 +91,10 @@ fn 
all_mir_bodies<'db>( } } -pub fn borrowck_query<'db>( - db: &'db dyn HirDatabase, +pub fn borrowck_query( + db: &dyn HirDatabase, def: DefWithBodyId, -) -> Result]>, MirLowerError<'db>> { +) -> Result, MirLowerError> { let _p = tracing::info_span!("borrowck_query").entered(); let module = def.module(db); let interner = DbInterner::new_with(db, module.krate(db)); @@ -125,20 +125,20 @@ fn make_fetch_closure_field<'db>( let (captures, _) = infer.closure_info(c); let parent_subst = subst.split_closure_args_untupled().parent_args; let interner = DbInterner::new_no_crate(db); - captures.get(f).expect("broken closure field").ty.instantiate(interner, parent_subst) + captures.get(f).expect("broken closure field").ty.get().instantiate(interner, parent_subst) } } fn moved_out_of_ref<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, -) -> Vec> { + body: &MirBody, +) -> Vec { let db = infcx.interner.db; let mut result = vec![]; - let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind { + let mut for_operand = |op: &Operand, span: MirSpan| match op.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { - let mut ty: Ty<'db> = body.locals[p.local].ty; + let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref(); let mut is_dereference_of_ref = false; for proj in p.projection.lookup(&body.projection_store) { if *proj == ProjectionElem::Deref && ty.as_reference().is_some() { @@ -156,7 +156,7 @@ fn moved_out_of_ref<'db>( && !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() { - result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty }); + result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty: ty.store() }); } } OperandKind::Constant { .. } | OperandKind::Static(_) => (), @@ -233,13 +233,13 @@ fn moved_out_of_ref<'db>( fn partially_moved<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, -) -> Vec> { + body: &MirBody, +) -> Vec { let db = infcx.interner.db; let mut result = vec![]; - let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind { + let mut for_operand = |op: &Operand, span: MirSpan| match op.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { - let mut ty: Ty<'db> = body.locals[p.local].ty; + let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref(); for proj in p.projection.lookup(&body.projection_store) { ty = proj.projected_ty( infcx, @@ -250,7 +250,7 @@ fn partially_moved<'db>( ); } if !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() { - result.push(PartiallyMoved { span, ty, local: p.local }); + result.push(PartiallyMoved { span, ty: ty.store(), local: p.local }); } } OperandKind::Constant { .. 
} | OperandKind::Static(_) => (), @@ -324,7 +324,7 @@ fn partially_moved<'db>( result } -fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec> { +fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut borrows = FxHashMap::default(); for (_, block) in body.basic_blocks.iter() { db.unwind_if_revision_cancelled(); @@ -332,7 +332,7 @@ fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec| { + .and_modify(|it: &mut BorrowRegion| { it.places.push(statement.span); }) .or_insert_with(|| BorrowRegion { @@ -377,12 +377,12 @@ enum ProjectionCase { fn place_case<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, - lvalue: &Place<'db>, + body: &MirBody, + lvalue: &Place, ) -> ProjectionCase { let db = infcx.interner.db; let mut is_part_of = false; - let mut ty = body.locals[lvalue.local].ty; + let mut ty = body.locals[lvalue.local].ty.as_ref(); for proj in lvalue.projection.lookup(&body.projection_store).iter() { match proj { ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw @@ -410,18 +410,18 @@ fn place_case<'db>( /// Returns a map from basic blocks to the set of locals that might be ever initialized before /// the start of the block. Only `StorageDead` can remove something from this map, and we ignore /// `Uninit` and `drop` and similar after initialization. -fn ever_initialized_map<'db>( - db: &'db dyn HirDatabase, - body: &MirBody<'db>, -) -> ArenaMap, ArenaMap, bool>> { - let mut result: ArenaMap, ArenaMap, bool>> = +fn ever_initialized_map( + db: &dyn HirDatabase, + body: &MirBody, +) -> ArenaMap> { + let mut result: ArenaMap> = body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect(); - fn dfs<'db>( - db: &'db dyn HirDatabase, - body: &MirBody<'db>, - l: LocalId<'db>, - stack: &mut Vec>, - result: &mut ArenaMap, ArenaMap, bool>>, + fn dfs( + db: &dyn HirDatabase, + body: &MirBody, + l: LocalId, + stack: &mut Vec, + result: &mut ArenaMap>, ) { while let Some(b) = stack.pop() { let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs @@ -509,11 +509,7 @@ fn ever_initialized_map<'db>( result } -fn push_mut_span<'db>( - local: LocalId<'db>, - span: MirSpan, - result: &mut ArenaMap, MutabilityReason>, -) { +fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap) { match &mut result[local] { MutabilityReason::Mut { spans } => spans.push(span), it @ (MutabilityReason::Not | MutabilityReason::Unused) => { @@ -522,16 +518,13 @@ fn push_mut_span<'db>( }; } -fn record_usage<'db>(local: LocalId<'db>, result: &mut ArenaMap, MutabilityReason>) { +fn record_usage(local: LocalId, result: &mut ArenaMap) { if let it @ MutabilityReason::Unused = &mut result[local] { *it = MutabilityReason::Not; }; } -fn record_usage_for_operand<'db>( - arg: &Operand<'db>, - result: &mut ArenaMap, MutabilityReason>, -) { +fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap) { if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind { record_usage(p.local, result); } @@ -540,10 +533,10 @@ fn record_usage_for_operand<'db>( fn mutability_of_locals<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, -) -> ArenaMap, MutabilityReason> { + body: &MirBody, +) -> ArenaMap { let db = infcx.interner.db; - let mut result: ArenaMap, MutabilityReason> = + let mut result: ArenaMap = body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect(); let ever_init_maps = 
ever_initialized_map(db, body); diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 3b4913cae3fb..b78e610b49e8 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -17,6 +17,7 @@ use hir_def::{ use hir_expand::{InFile, mod_path::path, name::Name}; use intern::sym; use la_arena::ArenaMap; +use macros::GenericTypeVisitable; use rustc_abi::TargetDataLayout; use rustc_apfloat::{ Float, @@ -42,8 +43,8 @@ use crate::{ layout::{Layout, LayoutError, RustcEnumVariantIdx}, method_resolution::{is_dyn_method, lookup_impl_const}, next_solver::{ - Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, Ty, TyKind, - TypingMode, UnevaluatedConst, ValueConst, + Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, + StoredConst, StoredTy, Ty, TyKind, TypingMode, UnevaluatedConst, ValueConst, infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -83,7 +84,7 @@ macro_rules! not_supported { }; } -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)] pub struct VTableMap<'db> { ty_to_id: FxHashMap, usize>, id_to_ty: Vec>, @@ -150,16 +151,16 @@ impl TlsData { } } -struct StackFrame<'db> { - locals: Locals<'db>, - destination: Option>, +struct StackFrame { + locals: Locals, + destination: Option, prev_stack_ptr: usize, span: (MirSpan, DefWithBodyId), } #[derive(Clone)] -enum MirOrDynIndex<'db> { - Mir(Arc>), +enum MirOrDynIndex { + Mir(Arc), Dyn(usize), } @@ -169,7 +170,7 @@ pub struct Evaluator<'db> { target_data_layout: Arc, stack: Vec, heap: Vec, - code_stack: Vec>, + code_stack: Vec, /// Stores the global location of the statics. We const evaluate every static first time we need it /// and see it's missing, then we add it to this to reuse. static_locations: FxHashMap, @@ -182,13 +183,13 @@ pub struct Evaluator<'db> { stdout: Vec, stderr: Vec, layout_cache: RefCell, Arc>>, - projected_ty_cache: RefCell, PlaceElem<'db>), Ty<'db>>>, + projected_ty_cache: RefCell, PlaceElem), Ty<'db>>>, not_special_fn_cache: RefCell>, - mir_or_dyn_index_cache: RefCell), MirOrDynIndex<'db>>>, - /// Constantly dropping and creating `Locals<'db>` is very costly. We store + mir_or_dyn_index_cache: RefCell), MirOrDynIndex>>, + /// Constantly dropping and creating `Locals` is very costly. We store /// old locals that we normally want to drop here, to reuse their allocations /// later. - unused_locals_store: RefCell>>>, + unused_locals_store: RefCell>>, cached_ptr_size: usize, cached_fn_trait_func: Option, cached_fn_mut_trait_func: Option, @@ -261,7 +262,7 @@ impl<'db> IntervalAndTy<'db> { addr: Address, ty: Ty<'db>, evaluator: &Evaluator<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, IntervalAndTy<'db>> { let size = evaluator.size_of_sized(ty, locals, "type of interval")?; Ok(IntervalAndTy { interval: Interval { addr, size }, ty }) @@ -340,22 +341,22 @@ impl Address { } #[derive(Clone, PartialEq, Eq)] -pub enum MirEvalError<'db> { - ConstEvalError(String, Box>), - LayoutError(LayoutError, Ty<'db>), +pub enum MirEvalError { + ConstEvalError(String, Box), + LayoutError(LayoutError, StoredTy), TargetDataLayoutNotAvailable(TargetLoadError), /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// then use this type of error. UndefinedBehavior(String), Panic(String), // FIXME: This should be folded into ConstEvalError? 
- MirLowerError(FunctionId, MirLowerError<'db>), - MirLowerErrorForClosure(InternedClosureId, MirLowerError<'db>), - TypeIsUnsized(Ty<'db>, &'static str), + MirLowerError(FunctionId, MirLowerError), + MirLowerErrorForClosure(InternedClosureId, MirLowerError), + TypeIsUnsized(StoredTy, &'static str), NotSupported(String), - InvalidConst(Const<'db>), + InvalidConst(StoredConst), InFunction( - Box>, + Box, Vec<(Either, MirSpan, DefWithBodyId)>, ), ExecutionLimitExceeded, @@ -363,12 +364,12 @@ pub enum MirEvalError<'db> { /// FIXME: Fold this into InternalError InvalidVTableId(usize), /// ? - CoerceUnsizedError(Ty<'db>), + CoerceUnsizedError(StoredTy), /// These should not occur, usually indicates a bug in mir lowering. InternalError(Box), } -impl MirEvalError<'_> { +impl MirEvalError { pub fn pretty_print( &self, f: &mut String, @@ -432,7 +433,9 @@ impl MirEvalError<'_> { write!( f, "Layout for type `{}` is not available due {err:?}", - ty.display(db, display_target).with_closure_style(ClosureStyle::ClosureWithId) + ty.as_ref() + .display(db, display_target) + .with_closure_style(ClosureStyle::ClosureWithId) )?; } MirEvalError::MirLowerError(func, err) => { @@ -495,7 +498,7 @@ impl MirEvalError<'_> { } } -impl std::fmt::Debug for MirEvalError<'_> { +impl std::fmt::Debug for MirEvalError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::ConstEvalError(arg0, arg1) => { @@ -534,15 +537,15 @@ impl std::fmt::Debug for MirEvalError<'_> { } } -type Result<'db, T> = std::result::Result>; +type Result<'db, T> = std::result::Result; #[derive(Debug, Default)] -struct DropFlags<'db> { - need_drop: FxHashSet>, +struct DropFlags { + need_drop: FxHashSet, } -impl<'db> DropFlags<'db> { - fn add_place(&mut self, p: Place<'db>, store: &ProjectionStore<'db>) { +impl DropFlags { + fn add_place(&mut self, p: Place, store: &ProjectionStore) { if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) { return; } @@ -550,7 +553,7 @@ impl<'db> DropFlags<'db> { self.need_drop.insert(p); } - fn remove_place(&mut self, p: &Place<'db>, store: &ProjectionStore<'db>) -> bool { + fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool { // FIXME: replace parents with parts if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it)) { self.need_drop.remove(&parent); @@ -565,10 +568,10 @@ impl<'db> DropFlags<'db> { } #[derive(Debug)] -struct Locals<'db> { - ptr: ArenaMap, Interval>, - body: Arc>, - drop_flags: DropFlags<'db>, +struct Locals { + ptr: ArenaMap, + body: Arc, + drop_flags: DropFlags, } pub struct MirOutput { @@ -587,7 +590,7 @@ impl MirOutput { pub fn interpret_mir<'db>( db: &'db dyn HirDatabase, - body: Arc>, + body: Arc, // FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now // they share their body with their parent, so in MIR lowering we have locals of the parent body, which // might have placeholders. 
With this argument, we (wrongly) assume that every placeholder type has @@ -596,7 +599,7 @@ pub fn interpret_mir<'db>( assert_placeholder_ty_is_unused: bool, trait_env: Option>, ) -> Result<'db, (Result<'db, Const<'db>>, MirOutput)> { - let ty = body.locals[return_slot()].ty; + let ty = body.locals[return_slot()].ty.as_ref(); let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?; let it: Result<'db, Const<'db>> = (|| { if evaluator.ptr_size() != size_of::() { @@ -694,11 +697,11 @@ impl<'db> Evaluator<'db> { self.infcx.interner.lang_items() } - fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> { + fn place_addr(&self, p: &Place, locals: &Locals) -> Result<'db, Address> { Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0) } - fn place_interval(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> { + fn place_interval(&self, p: &Place, locals: &Locals) -> Result<'db, Interval> { let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?; Ok(Interval { addr: place_addr_and_ty.0, @@ -714,7 +717,7 @@ impl<'db> Evaluator<'db> { self.cached_ptr_size } - fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem<'db>) -> Ty<'db> { + fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem) -> Ty<'db> { let pair = (ty, proj); if let Some(r) = self.projected_ty_cache.borrow().get(&pair) { return *r; @@ -733,6 +736,7 @@ impl<'db> Evaluator<'db> { .get(f) .expect("broken closure field") .ty + .get() .instantiate(self.interner(), parent_subst) }, self.crate_id, @@ -743,11 +747,11 @@ impl<'db> Evaluator<'db> { fn place_addr_and_ty_and_metadata<'a>( &'a self, - p: &Place<'db>, - locals: &'a Locals<'db>, + p: &Place, + locals: &'a Locals, ) -> Result<'db, (Address, Ty<'db>, Option)> { let mut addr = locals.ptr[p.local].addr; - let mut ty: Ty<'db> = locals.body.locals[p.local].ty; + let mut ty: Ty<'db> = locals.body.locals[p.local].ty.as_ref(); let mut metadata: Option = None; // locals are always sized for proj in p.projection.lookup(&locals.body.projection_store) { let prev_ty = ty; @@ -868,8 +872,8 @@ impl<'db> Evaluator<'db> { } let r = self .db - .layout_of_ty(ty, self.param_env) - .map_err(|e| MirEvalError::LayoutError(e, ty))?; + .layout_of_ty(ty.store(), self.param_env.store()) + .map_err(|e| MirEvalError::LayoutError(e, ty.store()))?; self.layout_cache.borrow_mut().insert(ty, r.clone()); Ok(r) } @@ -878,17 +882,17 @@ impl<'db> Evaluator<'db> { self.layout(Ty::new_adt(self.interner(), adt, subst)) } - fn place_ty<'a>(&'a self, p: &Place<'db>, locals: &'a Locals<'db>) -> Result<'db, Ty<'db>> { + fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<'db, Ty<'db>> { Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1) } - fn operand_ty(&self, o: &Operand<'db>, locals: &Locals<'db>) -> Result<'db, Ty<'db>> { + fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<'db, Ty<'db>> { Ok(match &o.kind { OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?, - OperandKind::Constant { konst: _, ty } => *ty, + OperandKind::Constant { konst: _, ty } => ty.as_ref(), &OperandKind::Static(s) => { - let ty = - InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr]; + let ty = InferenceResult::for_body(self.db, s.into()) + .expr_ty(self.db.body(s.into()).body_expr); Ty::new_ref( self.interner(), Region::new_static(self.interner()), @@ -901,8 +905,8 @@ impl<'db> Evaluator<'db> { fn operand_ty_and_eval( &mut self, - o: &Operand<'db>, - locals: &mut Locals<'db>, + 
o: &Operand, + locals: &mut Locals, ) -> Result<'db, IntervalAndTy<'db>> { Ok(IntervalAndTy { interval: self.eval_operand(o, locals)?, @@ -912,7 +916,7 @@ impl<'db> Evaluator<'db> { fn interpret_mir( &mut self, - body: Arc>, + body: Arc, args: impl Iterator, ) -> Result<'db, Interval> { if let Some(it) = self.stack_depth_limit.checked_sub(1) { @@ -1076,8 +1080,8 @@ impl<'db> Evaluator<'db> { fn fill_locals_for_body( &mut self, - body: &MirBody<'db>, - locals: &mut Locals<'db>, + body: &MirBody, + locals: &mut Locals, args: impl Iterator, ) -> Result<'db, ()> { let mut remain_args = body.param_locals.len(); @@ -1100,9 +1104,9 @@ impl<'db> Evaluator<'db> { fn create_locals_for_body( &mut self, - body: &Arc>, + body: &Arc, destination: Option, - ) -> Result<'db, (Locals<'db>, usize)> { + ) -> Result<'db, (Locals, usize)> { let mut locals = match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() { None => Locals { @@ -1126,7 +1130,7 @@ impl<'db> Evaluator<'db> { continue; } let (size, align) = self.size_align_of_sized( - it.ty, + it.ty.as_ref(), &locals, "no unsized local in extending stack", )?; @@ -1149,11 +1153,7 @@ impl<'db> Evaluator<'db> { Ok((locals, prev_stack_pointer)) } - fn eval_rvalue( - &mut self, - r: &Rvalue<'db>, - locals: &mut Locals<'db>, - ) -> Result<'db, IntervalOrOwned> { + fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<'db, IntervalOrOwned> { use IntervalOrOwned::*; Ok(match r { Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?), @@ -1445,7 +1445,7 @@ impl<'db> Evaluator<'db> { Owned(result.to_le_bytes().to_vec()) } Rvalue::Repeat(it, len) => { - let len = match try_const_usize(self.db, *len) { + let len = match try_const_usize(self.db, len.as_ref()) { Some(it) => it as usize, None => not_supported!("non evaluatable array len in repeat Rvalue"), }; @@ -1455,7 +1455,7 @@ impl<'db> Evaluator<'db> { } Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"), Rvalue::ShallowInitBoxWithAlloc(ty) => { - let Some((size, align)) = self.size_align_of(*ty, locals)? else { + let Some((size, align)) = self.size_align_of(ty.as_ref(), locals)? else { not_supported!("unsized box initialization"); }; let addr = self.heap_allocate(size, align)?; @@ -1477,7 +1477,7 @@ impl<'db> Evaluator<'db> { Owned(r) } AggregateKind::Tuple(ty) => { - let layout = self.layout(*ty)?; + let layout = self.layout(ty.as_ref())?; Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1486,10 +1486,8 @@ impl<'db> Evaluator<'db> { )?) } AggregateKind::Union(it, f) => { - let layout = self.layout_adt( - (*it).into(), - GenericArgs::new_from_iter(self.interner(), []), - )?; + let layout = + self.layout_adt((*it).into(), GenericArgs::empty(self.interner()))?; let offset = layout .fields .offset(u32::from(f.local_id.into_raw()) as usize) @@ -1501,7 +1499,7 @@ impl<'db> Evaluator<'db> { } AggregateKind::Adt(it, subst) => { let (size, variant_layout, tag) = - self.layout_of_variant(*it, *subst, locals)?; + self.layout_of_variant(*it, subst.as_ref(), locals)?; Owned(self.construct_with_layout( size, &variant_layout, @@ -1510,7 +1508,7 @@ impl<'db> Evaluator<'db> { )?) 
} AggregateKind::Closure(ty) => { - let layout = self.layout(*ty)?; + let layout = self.layout(ty.as_ref())?; Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1537,7 +1535,7 @@ impl<'db> Evaluator<'db> { PointerCast::Unsize => { let current_ty = self.operand_ty(operand, locals)?; let addr = self.eval_operand(operand, locals)?; - self.coerce_unsized(addr, current_ty, *target_ty)? + self.coerce_unsized(addr, current_ty, target_ty.as_ref())? } PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => { // This is no-op @@ -1556,8 +1554,11 @@ impl<'db> Evaluator<'db> { let current_ty = self.operand_ty(operand, locals)?; let is_signed = matches!(current_ty.kind(), TyKind::Int(_)); let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed); - let dest_size = - self.size_of_sized(*target_ty, locals, "destination of int to int cast")?; + let dest_size = self.size_of_sized( + target_ty.as_ref(), + locals, + "destination of int to int cast", + )?; Owned(current[0..dest_size].to_vec()) } CastKind::FloatToInt => { @@ -1579,9 +1580,12 @@ impl<'db> Evaluator<'db> { not_supported!("unstable floating point type f16 and f128"); } }; - let is_signed = matches!(target_ty.kind(), TyKind::Int(_)); - let dest_size = - self.size_of_sized(*target_ty, locals, "destination of float to int cast")?; + let is_signed = matches!(target_ty.as_ref().kind(), TyKind::Int(_)); + let dest_size = self.size_of_sized( + target_ty.as_ref(), + locals, + "destination of float to int cast", + )?; let dest_bits = dest_size * 8; let (max, min) = if dest_bits == 128 { (i128::MAX, i128::MIN) @@ -1614,7 +1618,7 @@ impl<'db> Evaluator<'db> { not_supported!("unstable floating point type f16 and f128"); } }; - let TyKind::Float(target_ty) = target_ty.kind() else { + let TyKind::Float(target_ty) = target_ty.as_ref().kind() else { not_supported!("invalid float to float cast"); }; match target_ty { @@ -1630,7 +1634,7 @@ impl<'db> Evaluator<'db> { let is_signed = matches!(current_ty.kind(), TyKind::Int(_)); let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed); let value = i128::from_le_bytes(value); - let TyKind::Float(target_ty) = target_ty.kind() else { + let TyKind::Float(target_ty) = target_ty.as_ref().kind() else { not_supported!("invalid int to float cast"); }; match target_ty { @@ -1709,12 +1713,12 @@ impl<'db> Evaluator<'db> { { let field_types = self.db.field_types(struct_id.into()); if let Some(ty) = - field_types.iter().last().map(|it| it.1.instantiate(self.interner(), subst)) + field_types.iter().last().map(|it| it.1.get().instantiate(self.interner(), subst)) { return self.coerce_unsized_look_through_fields(ty, goal); } } - Err(MirEvalError::CoerceUnsizedError(ty)) + Err(MirEvalError::CoerceUnsizedError(ty.store())) } fn coerce_unsized( @@ -1787,8 +1791,10 @@ impl<'db> Evaluator<'db> { not_supported!("unsizing struct without field"); }; let target_last_field = self.db.field_types(id.into())[last_field] + .get() .instantiate(self.interner(), target_subst); let current_last_field = self.db.field_types(id.into())[last_field] + .get() .instantiate(self.interner(), current_subst); return self.unsizing_ptr_from_addr( target_last_field, @@ -1806,7 +1812,7 @@ impl<'db> Evaluator<'db> { &mut self, it: VariantId, subst: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, (usize, Arc, Option<(usize, usize, i128)>)> { let adt = it.adt_id(self.db); if let DefWithBodyId::VariantId(f) = locals.body.owner @@ -1900,11 +1906,7 @@ impl<'db> 
Evaluator<'db> { Ok(result) } - fn eval_operand( - &mut self, - it: &Operand<'db>, - locals: &mut Locals<'db>, - ) -> Result<'db, Interval> { + fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<'db, Interval> { Ok(match &it.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { locals.drop_flags.remove_place(p, &locals.body.projection_store); @@ -1914,14 +1916,16 @@ impl<'db> Evaluator<'db> { let addr = self.eval_static(*st, locals)?; Interval::new(addr, self.ptr_size()) } - OperandKind::Constant { konst, .. } => self.allocate_const_in_heap(locals, *konst)?, + OperandKind::Constant { konst, .. } => { + self.allocate_const_in_heap(locals, konst.as_ref())? + } }) } #[allow(clippy::double_parens)] fn allocate_const_in_heap( &mut self, - locals: &Locals<'db>, + locals: &Locals, konst: Const<'db>, ) -> Result<'db, Interval> { let result_owner; @@ -1971,7 +1975,7 @@ impl<'db> Evaluator<'db> { } else if size < 16 && v.len() == 16 { Cow::Borrowed(&v[0..size]) } else { - return Err(MirEvalError::InvalidConst(konst)); + return Err(MirEvalError::InvalidConst(konst.store())); } } else { Cow::Borrowed(v) @@ -1993,7 +1997,7 @@ impl<'db> Evaluator<'db> { Ok(Interval::new(addr, size)) } - fn eval_place(&mut self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> { + fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<'db, Interval> { let addr = self.place_addr(p, locals)?; Ok(Interval::new( addr, @@ -2093,11 +2097,7 @@ impl<'db> Evaluator<'db> { Ok(()) } - fn size_align_of( - &self, - ty: Ty<'db>, - locals: &Locals<'db>, - ) -> Result<'db, Option<(usize, usize)>> { + fn size_align_of(&self, ty: Ty<'db>, locals: &Locals) -> Result<'db, Option<(usize, usize)>> { if let Some(layout) = self.layout_cache.borrow().get(&ty) { return Ok(layout .is_sized() @@ -2126,12 +2126,12 @@ impl<'db> Evaluator<'db> { fn size_of_sized( &self, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, what: &'static str, ) -> Result<'db, usize> { match self.size_align_of(ty, locals)? { Some(it) => Ok(it.0), - None => Err(MirEvalError::TypeIsUnsized(ty, what)), + None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)), } } @@ -2140,12 +2140,12 @@ impl<'db> Evaluator<'db> { fn size_align_of_sized( &self, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, what: &'static str, ) -> Result<'db, (usize, usize)> { match self.size_align_of(ty, locals)? 
{ Some(it) => Ok(it), - None => Err(MirEvalError::TypeIsUnsized(ty, what)), + None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)), } } @@ -2181,13 +2181,13 @@ impl<'db> Evaluator<'db> { &self, bytes: &[u8], ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, ComplexMemoryMap<'db>> { fn rec<'db>( this: &Evaluator<'db>, bytes: &[u8], ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, mm: &mut ComplexMemoryMap<'db>, stack_depth_limit: usize, ) -> Result<'db, ()> { @@ -2288,7 +2288,7 @@ impl<'db> Evaluator<'db> { .fields .offset(u32::from(f.into_raw()) as usize) .bytes_usize(); - let ty = field_types[f].instantiate(this.interner(), subst); + let ty = field_types[f].get().instantiate(this.interner(), subst); let size = this.layout(ty)?.size.bytes_usize(); rec( this, @@ -2314,7 +2314,7 @@ impl<'db> Evaluator<'db> { for (f, _) in data.fields().iter() { let offset = l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize(); - let ty = field_types[f].instantiate(this.interner(), subst); + let ty = field_types[f].get().instantiate(this.interner(), subst); let size = this.layout(ty)?.size.bytes_usize(); rec( this, @@ -2356,7 +2356,7 @@ impl<'db> Evaluator<'db> { ty_of_bytes: impl Fn(&[u8]) -> Result<'db, Ty<'db>> + Copy, addr: Address, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, ()> { // FIXME: support indirect references let layout = self.layout(ty)?; @@ -2389,7 +2389,7 @@ impl<'db> Evaluator<'db> { AdtId::StructId(s) => { for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); - let ty = ty.instantiate(self.interner(), args); + let ty = ty.get().instantiate(self.interner(), args); self.patch_addresses( patch_map, ty_of_bytes, @@ -2410,7 +2410,7 @@ impl<'db> Evaluator<'db> { ) { for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); - let ty = ty.instantiate(self.interner(), args); + let ty = ty.get().instantiate(self.interner(), args); self.patch_addresses( patch_map, ty_of_bytes, @@ -2477,10 +2477,10 @@ impl<'db> Evaluator<'db> { bytes: Interval, destination: Interval, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, - target_bb: Option>, + locals: &Locals, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let id = from_bytes!(usize, bytes.get(self)?); let next_ty = self.vtable_map.ty(id)?; use rustc_type_ir::TyKind; @@ -2508,19 +2508,23 @@ impl<'db> Evaluator<'db> { generic_args: GenericArgs<'db>, destination: Interval, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let mir_body = self .db - .monomorphized_mir_body_for_closure(closure, generic_args, self.param_env) + .monomorphized_mir_body_for_closure( + closure, + generic_args.store(), + self.param_env.store(), + ) .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?; - let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some() - { - closure_data.addr.to_bytes().to_vec() - } else { - closure_data.get(self)?.to_owned() - }; + let closure_data = + if mir_body.locals[mir_body.param_locals[0]].ty.as_ref().as_reference().is_some() { + closure_data.addr.to_bytes().to_vec() + } else { + closure_data.get(self)?.to_owned() + }; let arg_bytes = iter::once(Ok(closure_data)) .chain(args.iter().map(|it| Ok(it.get(self)?.to_owned()))) .collect::>>()?; @@ -2542,10 
+2546,10 @@ impl<'db> Evaluator<'db> { generic_args: GenericArgs<'db>, destination: Interval, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, - target_bb: Option>, + locals: &Locals, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { match def { CallableDefId::FunctionId(def) => { if self.detect_fn_trait(def).is_some() { @@ -2600,9 +2604,9 @@ impl<'db> Evaluator<'db> { &self, def: FunctionId, generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, - ) -> Result<'db, MirOrDynIndex<'db>> { + ) -> Result<'db, MirOrDynIndex> { let pair = (def, generic_args); if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) { return Ok(r.clone()); @@ -2621,7 +2625,7 @@ impl<'db> Evaluator<'db> { let mir_body = self .db - .monomorphized_mir_body(imp.into(), generic_args, self.param_env) + .monomorphized_mir_body(imp.into(), generic_args.store(), self.param_env.store()) .map_err(|e| { MirEvalError::InFunction( Box::new(MirEvalError::MirLowerError(imp, e)), @@ -2639,11 +2643,11 @@ impl<'db> Evaluator<'db> { mut def: FunctionId, args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, - target_bb: Option>, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { if self.detect_and_exec_special_function( def, args, @@ -2705,14 +2709,14 @@ impl<'db> Evaluator<'db> { fn exec_looked_up_function( &mut self, - mir_body: Arc>, - locals: &Locals<'db>, + mir_body: Arc, + locals: &Locals, def: FunctionId, arg_bytes: impl Iterator, span: MirSpan, destination: Interval, - target_bb: Option>, - ) -> Result<'db, Option>> { + target_bb: Option, + ) -> Result<'db, Option> { Ok(if let Some(target_bb) = target_bb { let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&mir_body, Some(destination))?; @@ -2736,11 +2740,11 @@ impl<'db> Evaluator<'db> { def: FunctionId, args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, - target_bb: Option>, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let func = args .first() .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?; @@ -2805,7 +2809,7 @@ impl<'db> Evaluator<'db> { } } - fn eval_static(&mut self, st: StaticId, locals: &Locals<'db>) -> Result<'db, Address> { + fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<'db, Address> { if let Some(o) = self.static_locations.get(&st) { return Ok(*o); }; @@ -2816,8 +2820,8 @@ impl<'db> Evaluator<'db> { })?; self.allocate_const_in_heap(locals, konst)? } else { - let ty = - InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr]; + let ty = InferenceResult::for_body(self.db, st.into()) + .expr_ty(self.db.body(st.into()).body_expr); let Some((size, align)) = self.size_align_of(ty, locals)? 
else { not_supported!("unsized extern static"); }; @@ -2852,12 +2856,7 @@ impl<'db> Evaluator<'db> { } } - fn drop_place( - &mut self, - place: &Place<'db>, - locals: &mut Locals<'db>, - span: MirSpan, - ) -> Result<'db, ()> { + fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<'db, ()> { let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?; if !locals.drop_flags.remove_place(place, &locals.body.projection_store) { return Ok(()); @@ -2872,7 +2871,7 @@ impl<'db> Evaluator<'db> { fn run_drop_glue_deep( &mut self, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, addr: Address, _metadata: &[u8], span: MirSpan, @@ -2886,7 +2885,7 @@ impl<'db> Evaluator<'db> { return Ok(()); }; - let generic_args = GenericArgs::new_from_iter(self.interner(), [ty.into()]); + let generic_args = GenericArgs::new_from_slice(&[ty.into()]); if let Ok(MirOrDynIndex::Mir(body)) = self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span) { @@ -2920,7 +2919,9 @@ impl<'db> Evaluator<'db> { .offset(u32::from(field.into_raw()) as usize) .bytes_usize(); let addr = addr.offset(offset); - let ty = field_types[field].instantiate(self.interner(), subst); + let ty = field_types[field] + .get() + .instantiate(self.interner(), subst); self.run_drop_glue_deep(ty, locals, addr, &[], span)?; } } @@ -3011,7 +3012,7 @@ pub fn render_const_using_debug_impl<'db>( let debug_fmt_fn_ptr = evaluator.vtable_map.id(Ty::new_fn_def( evaluator.interner(), CallableDefId::FunctionId(debug_fmt_fn).into(), - GenericArgs::new_from_iter(evaluator.interner(), [ty.into()]), + GenericArgs::new_from_slice(&[ty.into()]), )); evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?; // a3 = ::core::fmt::Arguments::new_v1(a1, a2) diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index 42c11113ee30..a47a8c440007 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -52,7 +52,7 @@ impl<'db> Evaluator<'db> { def: FunctionId, args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<'db, bool> { @@ -149,7 +149,7 @@ impl<'db> Evaluator<'db> { def: FunctionId, args: &[IntervalAndTy<'db>], self_ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<'db, ()> { @@ -195,7 +195,7 @@ impl<'db> Evaluator<'db> { self.exec_fn_with_args( def, args, - GenericArgs::new_from_iter(self.interner(), [self_ty.into()]), + GenericArgs::new_from_slice(&[self_ty.into()]), locals, destination, None, @@ -212,7 +212,7 @@ impl<'db> Evaluator<'db> { layout: Arc, addr: Address, def: FunctionId, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<'db, ()> { @@ -318,7 +318,7 @@ impl<'db> Evaluator<'db> { it: EvalLangItem, generic_args: GenericArgs<'db>, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, ) -> Result<'db, Vec> { use EvalLangItem::*; @@ -390,7 +390,7 @@ impl<'db> Evaluator<'db> { id: i64, args: &[IntervalAndTy<'db>], destination: Interval, - _locals: &Locals<'db>, + _locals: &Locals, _span: MirSpan, ) -> Result<'db, ()> { match id { @@ -421,7 +421,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], _generic_args: GenericArgs<'db>, destination: Interval, - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, ) -> Result<'db, ()> { match as_str { @@ -587,7 +587,7 @@ impl<'db> 
Evaluator<'db> { args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, destination: Interval, - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, needs_override: bool, ) -> Result<'db, bool> { @@ -1235,7 +1235,7 @@ impl<'db> Evaluator<'db> { def, &args, // FIXME: wrong for manual impls of `FnOnce` - GenericArgs::new_from_iter(self.interner(), []), + GenericArgs::empty(self.interner()), locals, destination, None, @@ -1369,7 +1369,7 @@ impl<'db> Evaluator<'db> { &mut self, ty: Ty<'db>, metadata: Interval, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, (usize, usize)> { Ok(match ty.kind() { TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1), @@ -1391,8 +1391,13 @@ impl<'db> Evaluator<'db> { _ => not_supported!("unsized enum or union"), }; let field_types = self.db.field_types(id.into()); - let last_field_ty = - field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst); + let last_field_ty = field_types + .iter() + .next_back() + .unwrap() + .1 + .get() + .instantiate(self.interner(), subst); let sized_part_size = layout.fields.offset(field_types.iter().count() - 1).bytes_usize(); let sized_part_align = layout.align.bytes() as usize; @@ -1423,7 +1428,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, destination: Interval, - locals: &Locals<'db>, + locals: &Locals, _span: MirSpan, ) -> Result<'db, ()> { // We are a single threaded runtime with no UB checking and no optimization, so diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs index 4c64a70a7a62..3896917cab1a 100644 --- a/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -35,6 +35,7 @@ impl<'db> Evaluator<'db> { not_supported!("simd type with no field"); }; let field_ty = self.db.field_types(id.into())[first_field] + .get() .instantiate(self.interner(), subst); return Ok((fields.len(), field_ty)); } @@ -67,7 +68,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], _generic_args: GenericArgs<'db>, destination: Interval, - _locals: &Locals<'db>, + _locals: &Locals, _span: MirSpan, ) -> Result<'db, ()> { match name { diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs index c13b76c12579..61dd7757c90b 100644 --- a/crates/hir-ty/src/mir/eval/tests.rs +++ b/crates/hir-ty/src/mir/eval/tests.rs @@ -15,7 +15,7 @@ use crate::{ use super::{MirEvalError, interpret_mir}; -fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> { +fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> { crate::attach_db(db, || { let interner = DbInterner::new_no_crate(db); let module_id = db.module_for_file(file_id.file_id(db)); @@ -39,11 +39,12 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), let body = db .monomorphized_mir_body( func_id.into(), - GenericArgs::new_from_iter(interner, []), + GenericArgs::empty(interner).store(), crate::ParamEnvAndCrate { param_env: db.trait_environment(func_id.into()), krate: func_id.krate(db), - }, + } + .store(), ) .map_err(|e| MirEvalError::MirLowerError(func_id, e))?; @@ -122,7 +123,7 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: fn check_error_with( #[rust_analyzer::rust_fixture] ra_fixture: &str, - expect_err: impl FnOnce(MirEvalError<'_>) -> bool, + expect_err: impl FnOnce(MirEvalError) -> bool, ) { let (db, file_ids) = TestDB::with_many_files(ra_fixture); 
crate::attach_db(&db, || { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index d7a8c18614a4..ef2a13848ca6 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -19,7 +19,7 @@ use hir_expand::name::Name; use la_arena::ArenaMap; use rustc_apfloat::Float; use rustc_hash::FxHashMap; -use rustc_type_ir::inherent::{Const as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _}; use span::{Edition, FileId}; use syntax::TextRange; use triomphe::Arc; @@ -42,7 +42,8 @@ use crate::{ TupleFieldId, Ty, UnOp, VariantId, return_slot, }, next_solver::{ - Const, DbInterner, ParamConst, ParamEnv, Region, TyKind, TypingMode, UnevaluatedConst, + Const, DbInterner, ParamConst, ParamEnv, Region, StoredGenericArgs, StoredTy, TyKind, + TypingMode, UnevaluatedConst, infer::{DbInternerInferExt, InferCtxt}, }, traits::FnTrait, @@ -56,39 +57,39 @@ mod pattern_matching; mod tests; #[derive(Debug, Clone)] -struct LoopBlocks<'db> { - begin: BasicBlockId<'db>, +struct LoopBlocks { + begin: BasicBlockId, /// `None` for loops that are not terminating - end: Option>, - place: Place<'db>, + end: Option, + place: Place, drop_scope_index: usize, } #[derive(Debug, Clone, Default)] -struct DropScope<'db> { +struct DropScope { /// locals, in order of definition (so we should run drop glues in reverse order) - locals: Vec>, + locals: Vec, } struct MirLowerCtx<'a, 'db> { - result: MirBody<'db>, + result: MirBody, owner: DefWithBodyId, - current_loop_blocks: Option>, - labeled_loop_blocks: FxHashMap>, - discr_temp: Option>, + current_loop_blocks: Option, + labeled_loop_blocks: FxHashMap, + discr_temp: Option, db: &'db dyn HirDatabase, body: &'a Body, - infer: &'a InferenceResult<'db>, + infer: &'a InferenceResult, resolver: Resolver<'db>, - drop_scopes: Vec>, + drop_scopes: Vec, env: ParamEnv<'db>, infcx: InferCtxt<'db>, } // FIXME: Make this smaller, its stored in database queries #[derive(Debug, Clone, PartialEq, Eq)] -pub enum MirLowerError<'db> { - ConstEvalError(Box, Box>), +pub enum MirLowerError { + ConstEvalError(Box, Box), LayoutError(LayoutError), IncompleteExpr, IncompletePattern, @@ -98,9 +99,9 @@ pub enum MirLowerError<'db> { RecordLiteralWithoutPath, UnresolvedMethod(String), UnresolvedField, - UnsizedTemporary(Ty<'db>), + UnsizedTemporary(StoredTy), MissingFunctionDefinition(DefWithBodyId, ExprId), - TypeMismatch(TypeMismatch<'db>), + TypeMismatch(TypeMismatch), HasErrors, /// This should never happen. Type mismatch should catch everything. TypeError(&'static str), @@ -113,11 +114,11 @@ pub enum MirLowerError<'db> { LangItemNotFound, MutatingRvalue, UnresolvedLabel, - UnresolvedUpvar(Place<'db>), + UnresolvedUpvar(Place), InaccessibleLocal, // monomorphization errors: - GenericArgNotProvided(GenericParamId, GenericArgs<'db>), + GenericArgNotProvided(GenericParamId, StoredGenericArgs), } /// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves. 
@@ -126,9 +127,9 @@ impl DropScopeToken { fn pop_and_drop<'db>( self, ctx: &mut MirLowerCtx<'_, 'db>, - current: BasicBlockId<'db>, + current: BasicBlockId, span: MirSpan, - ) -> BasicBlockId<'db> { + ) -> BasicBlockId { std::mem::forget(self); ctx.pop_drop_scope_internal(current, span) } @@ -158,7 +159,7 @@ impl Drop for DropScopeToken { // } // } -impl MirLowerError<'_> { +impl MirLowerError { pub fn pretty_print( &self, f: &mut String, @@ -190,8 +191,8 @@ impl MirLowerError<'_> { MirLowerError::TypeMismatch(e) => writeln!( f, "Type mismatch: Expected {}, found {}", - e.expected.display(db, display_target), - e.actual.display(db, display_target), + e.expected.as_ref().display(db, display_target), + e.actual.as_ref().display(db, display_target), )?, MirLowerError::GenericArgNotProvided(id, subst) => { let param_name = match *id { @@ -211,7 +212,7 @@ impl MirLowerError<'_> { param_name.unwrap_or(Name::missing()).display(db, display_target.edition) )?; writeln!(f, "Provided args: [")?; - for g in subst.iter() { + for g in subst.as_ref() { write!(f, " {},", g.display(db, display_target))?; } writeln!(f, "]")?; @@ -254,13 +255,13 @@ macro_rules! implementation_error { }}; } -impl From for MirLowerError<'_> { +impl From for MirLowerError { fn from(value: LayoutError) -> Self { MirLowerError::LayoutError(value) } } -impl MirLowerError<'_> { +impl MirLowerError { fn unresolved_path( db: &dyn HirDatabase, p: &Path, @@ -273,14 +274,14 @@ impl MirLowerError<'_> { } } -type Result<'db, T> = std::result::Result>; +type Result<'db, T> = std::result::Result; impl<'a, 'db> MirLowerCtx<'a, 'db> { fn new( db: &'db dyn HirDatabase, owner: DefWithBodyId, body: &'a Body, - infer: &'a InferenceResult<'db>, + infer: &'a InferenceResult, ) -> Self { let mut basic_blocks = Arena::new(); let start_block = basic_blocks.alloc(BasicBlock { @@ -289,7 +290,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { is_cleanup: false, }); let locals = Arena::new(); - let binding_locals: ArenaMap> = ArenaMap::new(); + let binding_locals: ArenaMap = ArenaMap::new(); let mir = MirBody { projection_store: ProjectionStore::default(), basic_blocks, @@ -332,16 +333,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.infcx.interner.lang_items() } - fn temp( - &mut self, - ty: Ty<'db>, - current: BasicBlockId<'db>, - span: MirSpan, - ) -> Result<'db, LocalId<'db>> { + fn temp(&mut self, ty: Ty<'db>, current: BasicBlockId, span: MirSpan) -> Result<'db, LocalId> { if matches!(ty.kind(), TyKind::Slice(_) | TyKind::Dynamic(..)) { - return Err(MirLowerError::UnsizedTemporary(ty)); + return Err(MirLowerError::UnsizedTemporary(ty.store())); } - let l = self.result.locals.alloc(Local { ty }); + let l = self.result.locals.alloc(Local { ty: ty.store() }); self.push_storage_live_for_local(l, current, span)?; Ok(l) } @@ -349,8 +345,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_some_operand( &mut self, expr_id: ExprId, - current: BasicBlockId<'db>, - ) -> Result<'db, Option<(Operand<'db>, BasicBlockId<'db>)>> { + current: BasicBlockId, + ) -> Result<'db, Option<(Operand, BasicBlockId)>> { if !self.has_adjustments(expr_id) && let Expr::Literal(l) = &self.body[expr_id] { @@ -366,10 +362,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place_with_adjust( &mut self, expr_id: ExprId, - place: Place<'db>, - current: BasicBlockId<'db>, - adjustments: &[Adjustment<'db>], - ) -> Result<'db, Option>> { + place: Place, + current: BasicBlockId, + adjustments: &[Adjustment], + ) -> Result<'db, Option> { match adjustments.split_last() { Some((last, 
rest)) => match &last.kind { Adjust::NeverToAny => { @@ -416,7 +412,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Rvalue::Cast( CastKind::PointerCoercion(*cast), Operand { kind: OperandKind::Copy(p), span: None }, - last.target, + last.target.clone(), ), expr_id.into(), ); @@ -430,11 +426,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place_with_borrow_adjust( &mut self, expr_id: ExprId, - place: Place<'db>, - current: BasicBlockId<'db>, - rest: &[Adjustment<'db>], + place: Place, + current: BasicBlockId, + rest: &[Adjustment], m: Mutability, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else { @@ -448,9 +444,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place( &mut self, expr_id: ExprId, - place: Place<'db>, - prev_block: BasicBlockId<'db>, - ) -> Result<'db, Option>> { + place: Place, + prev_block: BasicBlockId, + ) -> Result<'db, Option> { if let Some(adjustments) = self.infer.expr_adjustments.get(&expr_id) { return self.lower_expr_to_place_with_adjust(expr_id, place, prev_block, adjustments); } @@ -460,9 +456,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place_without_adjust( &mut self, expr_id: ExprId, - place: Place<'db>, - mut current: BasicBlockId<'db>, - ) -> Result<'db, Option>> { + place: Place, + mut current: BasicBlockId, + ) -> Result<'db, Option> { match &self.body[expr_id] { Expr::OffsetOf(_) => { not_supported!("builtin#offset_of") @@ -537,7 +533,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { const_id.into(), current, place, - GenericArgs::new_from_iter(self.interner(), []), + GenericArgs::empty(self.interner()), expr_id.into(), )?; Ok(Some(current)) @@ -545,7 +541,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { ValueNs::EnumVariantId(variant_id) => { let variant_fields = variant_id.fields(self.db); if variant_fields.shape == FieldsShape::Unit { - let ty = self.infer.type_of_expr[expr_id]; + let ty = self.infer.expr_ty(expr_id); current = self.lower_enum_variant( variant_id, current, @@ -575,8 +571,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { konst: Const::new_param( self.interner(), ParamConst { id: p, index }, - ), - ty: self.db.const_param_ty_ns(p), + ) + .store(), + ty: self.db.const_param_ty_ns(p).store(), }, span: None, }), @@ -907,7 +904,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { operands[u32::from(field_id.into_raw()) as usize] = Some(op); } let rvalue = Rvalue::Aggregate( - AggregateKind::Adt(variant_id, subst), + AggregateKind::Adt(variant_id, subst.store()), match spread_place { Some(sp) => operands .into_iter() @@ -978,15 +975,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { let rvalue = if self.infer.coercion_casts.contains(expr) { Rvalue::Use(it) } else { - let source_ty = self.infer[*expr]; - let target_ty = self.infer[expr_id]; + let source_ty = self.infer.expr_ty(*expr); + let target_ty = self.infer.expr_ty(expr_id); let cast_kind = if source_ty.as_reference().is_some() { CastKind::PointerCoercion(PointerCast::ArrayToPointer) } else { cast_kind(self.db, source_ty, target_ty)? }; - Rvalue::Cast(cast_kind, it, target_ty) + Rvalue::Cast(cast_kind, it, target_ty.store()) }; self.push_assignment(current, place, rvalue, expr_id.into()); Ok(Some(current)) @@ -1004,7 +1001,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.push_assignment( current, place, - Rvalue::ShallowInitBoxWithAlloc(ty), + Rvalue::ShallowInitBoxWithAlloc(ty.store()), expr_id.into(), ); let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? 
@@ -1222,7 +1219,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { current, place, Rvalue::Aggregate( - AggregateKind::Adt(st.into(), subst), + AggregateKind::Adt(st.into(), subst.store()), st.fields(self.db) .fields() .iter() @@ -1284,11 +1281,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { }; match &capture.kind { CaptureKind::ByRef(bk) => { - let tmp_ty = capture.ty.instantiate_identity(); + let tmp_ty = capture.ty.get().instantiate_identity(); // FIXME: Handle more than one span. let capture_spans = capture.spans(); - let tmp: Place<'db> = - self.temp(tmp_ty, current, capture_spans[0])?.into(); + let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into(); self.push_assignment( current, tmp, @@ -1305,7 +1301,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.push_assignment( current, place, - Rvalue::Aggregate(AggregateKind::Closure(ty), operands.into()), + Rvalue::Aggregate(AggregateKind::Closure(ty.store()), operands.into()), expr_id.into(), ); Ok(Some(current)) @@ -1325,7 +1321,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { return Ok(None); }; let r = Rvalue::Aggregate( - AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id)), + AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id).store()), values, ); self.push_assignment(current, place, r, expr_id.into()); @@ -1355,7 +1351,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { else { return Ok(None); }; - let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values); + let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty.store()), values); self.push_assignment(current, place, r, expr_id.into()); Ok(Some(current)) } @@ -1373,7 +1369,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { )); } }; - let r = Rvalue::Repeat(init, len); + let r = Rvalue::Repeat(init, len.store()); self.push_assignment(current, place, r, expr_id.into()); Ok(Some(current)) } @@ -1388,11 +1384,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { } } - fn push_field_projection( - &mut self, - place: &mut Place<'db>, - expr_id: ExprId, - ) -> Result<'db, ()> { + fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<'db, ()> { if let Expr::Field { expr, name } = &self.body[expr_id] { if let TyKind::Tuple(..) 
= self.expr_ty_after_adjustments(*expr).kind() { let index = @@ -1421,7 +1413,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { &mut self, ty: Ty<'db>, loc: &ExprId, - ) -> Result<'db, Operand<'db>> { + ) -> Result<'db, Operand> { match &self.body[*loc] { Expr::Literal(l) => self.lower_literal_to_operand(ty, l), Expr::Path(c) => { @@ -1443,7 +1435,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { ResolveValueResult::ValueNs(v, _) => { if let ValueNs::ConstId(c) = v { self.lower_const_to_operand( - GenericArgs::new_from_iter(self.interner(), []), + GenericArgs::empty(self.interner()), c.into(), ) } else { @@ -1461,10 +1453,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { } } - fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand<'db>> { + fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand> { let size = || { self.db - .layout_of_ty(ty, ParamEnvAndCrate { param_env: self.env, krate: self.krate() }) + .layout_of_ty( + ty.store(), + ParamEnvAndCrate { param_env: self.env, krate: self.krate() }.store(), + ) .map(|it| it.size.bytes_usize()) }; const USIZE_SIZE: usize = size_of::(); @@ -1512,15 +1507,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Ok(Operand::from_concrete_const(bytes, MemoryMap::default(), ty)) } - fn new_basic_block(&mut self) -> BasicBlockId<'db> { + fn new_basic_block(&mut self) -> BasicBlockId { self.result.basic_blocks.alloc(BasicBlock::default()) } fn lower_const( &mut self, const_id: GeneralConstId, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, subst: GenericArgs<'db>, span: MirSpan, ) -> Result<'db, ()> { @@ -1533,8 +1528,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { &mut self, subst: GenericArgs<'db>, const_id: GeneralConstId, - ) -> Result<'db, Operand<'db>> { - let konst = if subst.len() != 0 { + ) -> Result<'db, Operand> { + let konst = if !subst.is_empty() { // We can't evaluate constant with substitution now, as generics are not monomorphized in lowering. 
Const::new_unevaluated( self.interner(), @@ -1564,13 +1559,16 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { }) .unwrap() .instantiate(self.interner(), subst); - Ok(Operand { kind: OperandKind::Constant { konst, ty }, span: None }) + Ok(Operand { + kind: OperandKind::Constant { konst: konst.store(), ty: ty.store() }, + span: None, + }) } fn write_bytes_to_place( &mut self, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, cv: Box<[u8]>, ty: Ty<'db>, span: MirSpan, @@ -1582,12 +1580,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_enum_variant( &mut self, variant_id: EnumVariantId, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, ty: Ty<'db>, - fields: Box<[Operand<'db>]>, + fields: Box<[Operand]>, span: MirSpan, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { let subst = match ty.kind() { TyKind::Adt(_, subst) => subst, _ => implementation_error!("Non ADT enum"), @@ -1595,7 +1593,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.push_assignment( prev_block, place, - Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst), fields), + Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst.store()), fields), span, ); Ok(prev_block) @@ -1603,13 +1601,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_call_and_args( &mut self, - func: Operand<'db>, + func: Operand, args: impl Iterator, - place: Place<'db>, - mut current: BasicBlockId<'db>, + place: Place, + mut current: BasicBlockId, is_uninhabited: bool, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let Some(args) = args .map(|arg| { if let Some((temp, c)) = self.lower_expr_to_some_operand(arg, current)? { @@ -1628,13 +1626,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_call( &mut self, - func: Operand<'db>, - args: Box<[Operand<'db>]>, - place: Place<'db>, - current: BasicBlockId<'db>, + func: Operand, + args: Box<[Operand]>, + place: Place, + current: BasicBlockId, is_uninhabited: bool, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let b = if is_uninhabited { None } else { Some(self.new_basic_block()) }; self.set_terminator( current, @@ -1651,25 +1649,20 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Ok(b) } - fn is_unterminated(&mut self, source: BasicBlockId<'db>) -> bool { + fn is_unterminated(&mut self, source: BasicBlockId) -> bool { self.result.basic_blocks[source].terminator.is_none() } - fn set_terminator( - &mut self, - source: BasicBlockId<'db>, - terminator: TerminatorKind<'db>, - span: MirSpan, - ) { + fn set_terminator(&mut self, source: BasicBlockId, terminator: TerminatorKind, span: MirSpan) { self.result.basic_blocks[source].terminator = Some(Terminator { span, kind: terminator }); } - fn set_goto(&mut self, source: BasicBlockId<'db>, target: BasicBlockId<'db>, span: MirSpan) { + fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId, span: MirSpan) { self.set_terminator(source, TerminatorKind::Goto { target }, span); } fn expr_ty_without_adjust(&self, e: ExprId) -> Ty<'db> { - self.infer[e] + self.infer.expr_ty(e) } fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { @@ -1677,36 +1670,36 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { if let Some(it) = self.infer.expr_adjustments.get(&e) && let Some(it) = it.last() { - ty = Some(it.target); + ty = Some(it.target.as_ref()); } ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) } - fn push_statement(&mut self, block: BasicBlockId<'db>, statement: Statement<'db>) { + fn 
push_statement(&mut self, block: BasicBlockId, statement: Statement) { self.result.basic_blocks[block].statements.push(statement); } - fn push_fake_read(&mut self, block: BasicBlockId<'db>, p: Place<'db>, span: MirSpan) { + fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) { self.push_statement(block, StatementKind::FakeRead(p).with_span(span)); } fn push_assignment( &mut self, - block: BasicBlockId<'db>, - place: Place<'db>, - rvalue: Rvalue<'db>, + block: BasicBlockId, + place: Place, + rvalue: Rvalue, span: MirSpan, ) { self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span)); } - fn discr_temp_place(&mut self, current: BasicBlockId<'db>) -> Place<'db> { + fn discr_temp_place(&mut self, current: BasicBlockId) -> Place { match &self.discr_temp { Some(it) => *it, None => { // FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well let discr_ty = Ty::new_int(self.interner(), rustc_type_ir::IntTy::I128); - let tmp: Place<'db> = self + let tmp: Place = self .temp(discr_ty, current, MirSpan::Unknown) .expect("discr_ty is never unsized") .into(); @@ -1718,12 +1711,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_loop( &mut self, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, label: Option, span: MirSpan, - f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId<'db>) -> Result<'db, ()>, - ) -> Result<'db, Option>> { + f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId) -> Result<'db, ()>, + ) -> Result<'db, Option> { let begin = self.new_basic_block(); let prev = self.current_loop_blocks.replace(LoopBlocks { begin, @@ -1758,10 +1751,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn merge_blocks( &mut self, - b1: Option>, - b2: Option>, + b1: Option, + b2: Option, span: MirSpan, - ) -> Option> { + ) -> Option { match (b1, b2) { (None, None) => None, (None, Some(b)) | (Some(b), None) => Some(b), @@ -1774,7 +1767,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { } } - fn current_loop_end(&mut self) -> Result<'db, BasicBlockId<'db>> { + fn current_loop_end(&mut self) -> Result<'db, BasicBlockId> { let r = match self .current_loop_blocks .as_mut() @@ -1801,7 +1794,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn is_uninhabited(&self, expr_id: ExprId) -> bool { is_ty_uninhabited_from( &self.infcx, - self.infer[expr_id], + self.infer.expr_ty(expr_id), self.owner.module(self.db), self.env, ) @@ -1809,15 +1802,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and /// `Drop` in the appropriated places. 
- fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId<'db>) -> Result<'db, ()> { + fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<'db, ()> { let l = self.binding_local(b)?; self.push_storage_live_for_local(l, current, MirSpan::BindingId(b)) } fn push_storage_live_for_local( &mut self, - l: LocalId<'db>, - current: BasicBlockId<'db>, + l: LocalId, + current: BasicBlockId, span: MirSpan, ) -> Result<'db, ()> { self.drop_scopes.last_mut().unwrap().locals.push(l); @@ -1828,11 +1821,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_block_to_place( &mut self, statements: &[hir_def::hir::Statement], - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, tail: Option, - place: Place<'db>, + place: Place, span: MirSpan, - ) -> Result<'db, Option>>> { + ) -> Result<'db, Option>> { let scope = self.push_drop_scope(); for statement in statements.iter() { match statement { @@ -1908,11 +1901,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { params: impl Iterator)> + Clone, self_binding: Option<(BindingId, Ty<'db>)>, pick_binding: impl Fn(BindingId) -> bool, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { let base_param_count = self.result.param_locals.len(); let self_binding = match self_binding { Some((self_binding, ty)) => { - let local_id = self.result.locals.alloc(Local { ty }); + let local_id = self.result.locals.alloc(Local { ty: ty.store() }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); self.result.binding_locals.insert(self_binding, local_id); self.result.param_locals.push(local_id); @@ -1921,7 +1914,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { None => None, }; self.result.param_locals.extend(params.clone().map(|(it, ty)| { - let local_id = self.result.locals.alloc(Local { ty }); + let local_id = self.result.locals.alloc(Local { ty: ty.store() }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); if let Pat::Bind { id, subpat: None } = self.body[it] && matches!( @@ -1939,9 +1932,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { continue; } if !self.result.binding_locals.contains_idx(id) { - self.result - .binding_locals - .insert(id, self.result.locals.alloc(Local { ty: self.infer[id] })); + self.result.binding_locals.insert( + id, + self.result.locals.alloc(Local { ty: self.infer.binding_ty(id).store() }), + ); } } let mut current = self.result.start_block; @@ -1976,7 +1970,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Ok(current) } - fn binding_local(&self, b: BindingId) -> Result<'db, LocalId<'db>> { + fn binding_local(&self, b: BindingId) -> Result<'db, LocalId> { match self.result.binding_locals.get(b) { Some(it) => Ok(*it), None => { @@ -2025,9 +2019,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn drop_until_scope( &mut self, scope_index: usize, - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - ) -> BasicBlockId<'db> { + ) -> BasicBlockId { for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() { self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span); } @@ -2047,9 +2041,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { /// Don't call directly fn pop_drop_scope_internal( &mut self, - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - ) -> BasicBlockId<'db> { + ) -> BasicBlockId { let scope = self.drop_scopes.pop().unwrap(); self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span); current @@ -2057,9 +2051,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn pop_drop_scope_assert_finished( &mut self, - mut current: 
BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { current = self.pop_drop_scope_internal(current, span); if !self.drop_scopes.is_empty() { implementation_error!("Mismatched count between drop scope push and pops"); @@ -2069,12 +2063,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn emit_drop_and_storage_dead_for_scope( &mut self, - scope: &DropScope<'db>, - current: &mut Idx>, + scope: &DropScope, + current: &mut Idx, span: MirSpan, ) { for &l in scope.locals.iter().rev() { - if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty) { + if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty.as_ref()) + { let prev = std::mem::replace(current, self.new_basic_block()); self.set_terminator( prev, @@ -2112,36 +2107,37 @@ fn cast_kind<'db>( pub fn mir_body_for_closure_query<'db>( db: &'db dyn HirDatabase, closure: InternedClosureId, -) -> Result<'db, Arc>> { +) -> Result<'db, Arc> { let InternedClosure(owner, expr) = db.lookup_intern_closure(closure); let body = db.body(owner); let infer = InferenceResult::for_body(db, owner); let Expr::Closure { args, body: root, .. } = &body[expr] else { implementation_error!("closure expression is not closure"); }; - let crate::next_solver::TyKind::Closure(_, substs) = infer[expr].kind() else { + let crate::next_solver::TyKind::Closure(_, substs) = infer.expr_ty(expr).kind() else { implementation_error!("closure expression is not closure"); }; let (captures, kind) = infer.closure_info(closure); let mut ctx = MirLowerCtx::new(db, owner, &body, infer); // 0 is return local - ctx.result.locals.alloc(Local { ty: infer[*root] }); + ctx.result.locals.alloc(Local { ty: infer.expr_ty(*root).store() }); let closure_local = ctx.result.locals.alloc(Local { ty: match kind { - FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer[expr], + FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer.expr_ty(expr), FnTrait::FnMut | FnTrait::AsyncFnMut => Ty::new_ref( ctx.interner(), Region::error(ctx.interner()), - infer[expr], + infer.expr_ty(expr), Mutability::Mut, ), FnTrait::Fn | FnTrait::AsyncFn => Ty::new_ref( ctx.interner(), Region::error(ctx.interner()), - infer[expr], + infer.expr_ty(expr), Mutability::Not, ), - }, + } + .store(), }); ctx.result.param_locals.push(closure_local); let Some(sig) = @@ -2160,8 +2156,7 @@ pub fn mir_body_for_closure_query<'db>( let current = ctx.pop_drop_scope_assert_finished(current, root.into())?; ctx.set_terminator(current, TerminatorKind::Return, (*root).into()); } - let mut upvar_map: FxHashMap, Vec<(&CapturedItem<'_>, usize)>> = - FxHashMap::default(); + let mut upvar_map: FxHashMap> = FxHashMap::default(); for (i, capture) in captures.iter().enumerate() { let local = ctx.binding_local(capture.place.local)?; upvar_map.entry(local).or_default().push((capture, i)); @@ -2226,7 +2221,7 @@ pub fn mir_body_for_closure_query<'db>( pub fn mir_body_query<'db>( db: &'db dyn HirDatabase, def: DefWithBodyId, -) -> Result<'db, Arc>> { +) -> Result<'db, Arc> { let krate = def.krate(db); let edition = krate.data(db).edition; let detail = match def { @@ -2263,7 +2258,7 @@ pub(crate) fn mir_body_cycle_result<'db>( _db: &'db dyn HirDatabase, _: salsa::Id, _def: DefWithBodyId, -) -> Result<'db, Arc>> { +) -> Result<'db, Arc> { Err(MirLowerError::Loop) } @@ -2271,17 +2266,17 @@ pub fn lower_to_mir<'db>( db: &'db dyn HirDatabase, owner: DefWithBodyId, body: &Body, - infer: &InferenceResult<'db>, + infer: &InferenceResult, // FIXME: root_expr 
should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we // need to take this input explicitly. root_expr: ExprId, -) -> Result<'db, MirBody<'db>> { +) -> Result<'db, MirBody> { if infer.type_mismatches().next().is_some() || infer.is_erroneous() { return Err(MirLowerError::HasErrors); } let mut ctx = MirLowerCtx::new(db, owner, body, infer); // 0 is return local - ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) }); + ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr).store() }); let binding_picker = |b: BindingId| { let owner = ctx.body.binding_owner(b); if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) } diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index 40c6c5de795a..cf05ec27ac37 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -20,8 +20,8 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_expr_to_some_place_without_adjust( &mut self, expr_id: ExprId, - prev_block: BasicBlockId<'db>, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + prev_block: BasicBlockId, + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let ty = self.expr_ty_without_adjust(expr_id); let place = self.temp(ty, prev_block, expr_id.into())?; let Some(current) = @@ -35,12 +35,12 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_expr_to_some_place_with_adjust( &mut self, expr_id: ExprId, - prev_block: BasicBlockId<'db>, - adjustments: &[Adjustment<'db>], - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + prev_block: BasicBlockId, + adjustments: &[Adjustment], + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let ty = adjustments .last() - .map(|it| it.target) + .map(|it| it.target.as_ref()) .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)); let place = self.temp(ty, prev_block, expr_id.into())?; let Some(current) = @@ -53,11 +53,11 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn lower_expr_as_place_with_adjust( &mut self, - current: BasicBlockId<'db>, + current: BasicBlockId, expr_id: ExprId, upgrade_rvalue: bool, - adjustments: &[Adjustment<'db>], - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + adjustments: &[Adjustment], + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| { if !upgrade_rvalue { return Err(MirLowerError::MutatingRvalue); @@ -93,9 +93,9 @@ impl<'db> MirLowerCtx<'_, 'db> { current, r, rest.last() - .map(|it| it.target) + .map(|it| it.target.as_ref()) .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)), - last.target, + last.target.as_ref(), expr_id.into(), match od.0 { Some(Mutability::Mut) => true, @@ -115,10 +115,10 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn lower_expr_as_place( &mut self, - current: BasicBlockId<'db>, + current: BasicBlockId, expr_id: ExprId, upgrade_rvalue: bool, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { match self.infer.expr_adjustments.get(&expr_id) { Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a), None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue), @@ -127,10 +127,10 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn lower_expr_as_place_without_adjust( &mut self, - current: BasicBlockId<'db>, + current: BasicBlockId, expr_id: ExprId, upgrade_rvalue: bool, - ) -> Result<'db, Option<(Place<'db>, 
BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| { if !upgrade_rvalue { return Err(MirLowerError::MutatingRvalue); @@ -159,7 +159,7 @@ impl<'db> MirLowerCtx<'_, 'db> { ty, Mutability::Not, ); - let temp: Place<'db> = self.temp(ref_ty, current, expr_id.into())?.into(); + let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into(); self.push_assignment( current, temp, @@ -279,21 +279,21 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_overloaded_index( &mut self, - current: BasicBlockId<'db>, - place: Place<'db>, + current: BasicBlockId, + place: Place, base_ty: Ty<'db>, result_ty: Ty<'db>, - index_operand: Operand<'db>, + index_operand: Operand, span: MirSpan, index_fn: (FunctionId, GenericArgs<'db>), - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let mutability = match base_ty.as_reference() { Some((_, _, mutability)) => mutability, None => Mutability::Not, }; let result_ref = Ty::new_ref(self.interner(), Region::error(self.interner()), result_ty, mutability); - let mut result: Place<'db> = self.temp(result_ref, current, span)?.into(); + let mut result: Place = self.temp(result_ref, current, span)?.into(); let index_fn_op = Operand::const_zst(Ty::new_fn_def( self.interner(), CallableDefId::FunctionId(index_fn.0).into(), @@ -316,13 +316,13 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_overloaded_deref( &mut self, - current: BasicBlockId<'db>, - place: Place<'db>, + current: BasicBlockId, + place: Place, source_ty: Ty<'db>, target_ty: Ty<'db>, span: MirSpan, mutability: bool, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let lang_items = self.lang_items(); let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability { ( @@ -342,7 +342,7 @@ impl<'db> MirLowerCtx<'_, 'db> { let error_region = Region::error(self.interner()); let ty_ref = Ty::new_ref(self.interner(), error_region, source_ty, mutability); let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability); - let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into(); + let ref_place: Place = self.temp(ty_ref, current, span)?.into(); self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span); let deref_trait = trait_lang_item.ok_or(MirLowerError::LangItemNotFound)?; let deref_fn = deref_trait @@ -352,9 +352,9 @@ impl<'db> MirLowerCtx<'_, 'db> { let deref_fn_op = Operand::const_zst(Ty::new_fn_def( self.interner(), CallableDefId::FunctionId(deref_fn).into(), - GenericArgs::new_from_iter(self.interner(), [source_ty.into()]), + GenericArgs::new_from_slice(&[source_ty.into()]), )); - let mut result: Place<'db> = self.temp(target_ty_ref, current, span)?.into(); + let mut result: Place = self.temp(target_ty_ref, current, span)?.into(); let Some(current) = self.lower_call( deref_fn_op, Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]), diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs index c3a4814a3ab4..a8aacbff16fa 100644 --- a/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,7 +1,7 @@ //! 
MIR lowering for patterns use hir_def::{hir::ExprId, signatures::VariantFields}; -use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; use crate::{ BindingMode, @@ -63,11 +63,11 @@ impl<'db> MirLowerCtx<'_, 'db> { /// so it should be an empty block. pub(super) fn pattern_match( &mut self, - current: BasicBlockId<'db>, - current_else: Option>, - cond_place: Place<'db>, + current: BasicBlockId, + current_else: Option, + cond_place: Place, pattern: PatId, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let (current, current_else) = self.pattern_match_inner( current, current_else, @@ -87,10 +87,10 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn pattern_match_assignment( &mut self, - current: BasicBlockId<'db>, - value: Place<'db>, + current: BasicBlockId, + value: Place, pattern: PatId, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { let (current, _) = self.pattern_match_inner(current, None, value, pattern, MatchingMode::Assign)?; Ok(current) @@ -99,9 +99,9 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn match_self_param( &mut self, id: BindingId, - current: BasicBlockId<'db>, - local: LocalId<'db>, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + current: BasicBlockId, + local: LocalId, + ) -> Result<'db, (BasicBlockId, Option)> { self.pattern_match_binding( id, BindingMode::Move, @@ -114,12 +114,12 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_inner( &mut self, - mut current: BasicBlockId<'db>, - mut current_else: Option>, - mut cond_place: Place<'db>, + mut current: BasicBlockId, + mut current_else: Option, + mut cond_place: Place, pattern: PatId, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default(); cond_place.projection = self.result.projection_store.intern( cond_place @@ -135,7 +135,7 @@ impl<'db> MirLowerCtx<'_, 'db> { Pat::Missing => return Err(MirLowerError::IncompletePattern), Pat::Wild => (current, current_else), Pat::Tuple { args, ellipsis } => { - let subst = match self.infer[pattern].kind() { + let subst = match self.infer.pat_ty(pattern).kind() { TyKind::Tuple(s) => s, _ => { return Err(MirLowerError::TypeError( @@ -209,10 +209,11 @@ impl<'db> MirLowerCtx<'_, 'db> { } Pat::Range { start, end, range_type: _ } => { let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> { - let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?; + let lv = + self.lower_literal_or_const_to_operand(self.infer.pat_ty(pattern), l)?; let else_target = *current_else.get_or_insert_with(|| self.new_basic_block()); let next = self.new_basic_block(); - let discr: Place<'db> = + let discr: Place = self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into(); self.push_assignment( current, @@ -249,9 +250,9 @@ impl<'db> MirLowerCtx<'_, 'db> { Pat::Slice { prefix, slice, suffix } => { if mode == MatchingMode::Check { // emit runtime length check for slice - if let TyKind::Slice(_) = self.infer[pattern].kind() { + if let TyKind::Slice(_) = self.infer.pat_ty(pattern).kind() { let pattern_len = prefix.len() + suffix.len(); - let place_len: Place<'db> = self + let place_len: Place = self .temp(Ty::new_usize(self.interner()), current, pattern.into())? 
.into(); self.push_assignment( @@ -285,7 +286,7 @@ impl<'db> MirLowerCtx<'_, 'db> { MemoryMap::default(), Ty::new_usize(self.interner()), ); - let discr: Place<'db> = self + let discr: Place = self .temp(Ty::new_bool(self.interner()), current, pattern.into())? .into(); self.push_assignment( @@ -398,15 +399,15 @@ impl<'db> MirLowerCtx<'_, 'db> { break 'b (c, x.1); } if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr { - break 'b (c, GenericArgs::new_from_iter(self.interner(), [])); + break 'b (c, GenericArgs::empty(self.interner())); } not_supported!("path in pattern position that is not const or variant") }; - let tmp: Place<'db> = - self.temp(self.infer[pattern], current, pattern.into())?.into(); + let tmp: Place = + self.temp(self.infer.pat_ty(pattern), current, pattern.into())?.into(); let span = pattern.into(); self.lower_const(c.into(), current, tmp, subst, span)?; - let tmp2: Place<'db> = + let tmp2: Place = self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into(); self.push_assignment( current, @@ -434,7 +435,7 @@ impl<'db> MirLowerCtx<'_, 'db> { Pat::Lit(l) => match &self.body[*l] { Expr::Literal(l) => { if mode == MatchingMode::Check { - let c = self.lower_literal_to_operand(self.infer[pattern], l)?; + let c = self.lower_literal_to_operand(self.infer.pat_ty(pattern), l)?; self.pattern_match_const(current_else, current, c, cond_place, pattern)? } else { (current, current_else) @@ -506,11 +507,11 @@ impl<'db> MirLowerCtx<'_, 'db> { &mut self, id: BindingId, mode: BindingMode, - cond_place: Place<'db>, + cond_place: Place, span: MirSpan, - current: BasicBlockId<'db>, - current_else: Option>, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + current: BasicBlockId, + current_else: Option, + ) -> Result<'db, (BasicBlockId, Option)> { let target_place = self.binding_local(id)?; self.push_storage_live(id, current)?; self.push_match_assignment(current, target_place, mode, cond_place, span); @@ -519,10 +520,10 @@ impl<'db> MirLowerCtx<'_, 'db> { fn push_match_assignment( &mut self, - current: BasicBlockId<'db>, - target_place: LocalId<'db>, + current: BasicBlockId, + target_place: LocalId, mode: BindingMode, - cond_place: Place<'db>, + cond_place: Place, span: MirSpan, ) { self.push_assignment( @@ -545,15 +546,15 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_const( &mut self, - current_else: Option>, - current: BasicBlockId<'db>, - c: Operand<'db>, - cond_place: Place<'db>, + current_else: Option, + current: BasicBlockId, + c: Operand, + cond_place: Place, pattern: Idx, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let then_target = self.new_basic_block(); let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); - let discr: Place<'db> = + let discr: Place = self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into(); self.push_assignment( current, @@ -579,14 +580,14 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_matching_variant( &mut self, - cond_place: Place<'db>, + cond_place: Place, variant: VariantId, - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - mut current_else: Option>, + mut current_else: Option, shape: AdtPatternShape<'_>, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { Ok(match variant { VariantId::EnumVariantId(v) => { if mode == MatchingMode::Check { @@ -635,11 +636,11 @@ impl<'db> MirLowerCtx<'_, 'db> { shape: AdtPatternShape<'_>, variant_data: 
&VariantFields, v: VariantId, - current: BasicBlockId<'db>, - current_else: Option>, - cond_place: &Place<'db>, + current: BasicBlockId, + current_else: Option, + cond_place: &Place, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { Ok(match shape { AdtPatternShape::Record { args } => { let it = args @@ -678,12 +679,12 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_adt( &mut self, - mut current: BasicBlockId<'db>, - mut current_else: Option>, - args: impl Iterator, PatId)>, - cond_place: &Place<'db>, + mut current: BasicBlockId, + mut current_else: Option, + args: impl Iterator, + cond_place: &Place, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { for (proj, arg) in args { let cond_place = cond_place.project(proj, &mut self.result.projection_store); (current, current_else) = @@ -694,14 +695,14 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_tuple_like( &mut self, - current: BasicBlockId<'db>, - current_else: Option>, + current: BasicBlockId, + current_else: Option, args: &[PatId], ellipsis: Option, - fields: impl DoubleEndedIterator> + Clone, - cond_place: &Place<'db>, + fields: impl DoubleEndedIterator + Clone, + cond_place: &Place, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); let it = al .iter() diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs index beb0003c31ca..5752a3d7fae4 100644 --- a/crates/hir-ty/src/mir/monomorphization.rs +++ b/crates/hir-ty/src/mir/monomorphization.rs @@ -8,7 +8,7 @@ //! So the monomorphization should be called even if the substitution is empty. 
use hir_def::DefWithBodyId; -use rustc_type_ir::inherent::{IntoKind, SliceLike}; +use rustc_type_ir::inherent::IntoKind; use rustc_type_ir::{ FallibleTypeFolder, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeVisitableExt, }; @@ -16,7 +16,8 @@ use triomphe::Arc; use crate::{ ParamEnvAndCrate, - next_solver::{Const, ConstKind, Region, RegionKind}, + next_solver::{Const, ConstKind, Region, RegionKind, StoredConst, StoredGenericArgs, StoredTy}, + traits::StoredParamEnvAndCrate, }; use crate::{ db::{HirDatabase, InternedClosureId}, @@ -37,7 +38,7 @@ struct Filler<'db> { } impl<'db> FallibleTypeFolder> for Filler<'db> { - type Error = MirLowerError<'db>; + type Error = MirLowerError; fn cx(&self) -> DbInterner<'db> { self.infcx.interner @@ -69,7 +70,7 @@ impl<'db> FallibleTypeFolder> for Filler<'db> { .get(param.index as usize) .and_then(|arg| arg.ty()) .ok_or_else(|| { - MirLowerError::GenericArgNotProvided(param.id.into(), self.subst) + MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()) })?), _ => ty.try_super_fold_with(self), } @@ -79,22 +80,18 @@ impl<'db> FallibleTypeFolder> for Filler<'db> { let ConstKind::Param(param) = ct.kind() else { return ct.try_super_fold_with(self); }; - self.subst - .as_slice() - .get(param.index as usize) - .and_then(|arg| arg.konst()) - .ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)) + self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.konst()).ok_or_else( + || MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()), + ) } fn try_fold_region(&mut self, region: Region<'db>) -> Result, Self::Error> { let RegionKind::ReEarlyParam(param) = region.kind() else { return Ok(region); }; - self.subst - .as_slice() - .get(param.index as usize) - .and_then(|arg| arg.region()) - .ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)) + self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.region()).ok_or_else( + || MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()), + ) } } @@ -105,33 +102,50 @@ impl<'db> Filler<'db> { Self { infcx, trait_env: env, subst } } - fn fill> + Copy>( - &mut self, - t: &mut T, - ) -> Result<(), MirLowerError<'db>> { + fn fill_ty(&mut self, t: &mut StoredTy) -> Result<(), MirLowerError> { // Can't deep normalized as that'll try to normalize consts and fail. - *t = t.try_fold_with(self)?; - if references_non_lt_error(t) { + *t = t.as_ref().try_fold_with(self)?.store(); + if references_non_lt_error(&t.as_ref()) { Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned())) } else { Ok(()) } } - fn fill_operand(&mut self, op: &mut Operand<'db>) -> Result<(), MirLowerError<'db>> { + fn fill_const(&mut self, t: &mut StoredConst) -> Result<(), MirLowerError> { + // Can't deep normalized as that'll try to normalize consts and fail. + *t = t.as_ref().try_fold_with(self)?.store(); + if references_non_lt_error(&t.as_ref()) { + Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned())) + } else { + Ok(()) + } + } + + fn fill_args(&mut self, t: &mut StoredGenericArgs) -> Result<(), MirLowerError> { + // Can't deep normalized as that'll try to normalize consts and fail. 
+ *t = t.as_ref().try_fold_with(self)?.store(); + if references_non_lt_error(&t.as_ref()) { + Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned())) + } else { + Ok(()) + } + } + + fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> { match &mut op.kind { OperandKind::Constant { konst, ty } => { - self.fill(konst)?; - self.fill(ty)?; + self.fill_const(konst)?; + self.fill_ty(ty)?; } OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (), } Ok(()) } - fn fill_body(&mut self, body: &mut MirBody<'db>) -> Result<(), MirLowerError<'db>> { + fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> { for (_, l) in body.locals.iter_mut() { - self.fill(&mut l.ty)?; + self.fill_ty(&mut l.ty)?; } for (_, bb) in body.basic_blocks.iter_mut() { for statement in &mut bb.statements { @@ -144,20 +158,20 @@ impl<'db> Filler<'db> { match ak { super::AggregateKind::Array(ty) | super::AggregateKind::Tuple(ty) - | super::AggregateKind::Closure(ty) => self.fill(ty)?, - super::AggregateKind::Adt(_, subst) => self.fill(subst)?, + | super::AggregateKind::Closure(ty) => self.fill_ty(ty)?, + super::AggregateKind::Adt(_, subst) => self.fill_args(subst)?, super::AggregateKind::Union(_, _) => (), } } Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => { - self.fill(ty)?; + self.fill_ty(ty)?; } Rvalue::Use(op) => { self.fill_operand(op)?; } Rvalue::Repeat(op, len) => { self.fill_operand(op)?; - self.fill(len)?; + self.fill_const(len)?; } Rvalue::Ref(_, _) | Rvalue::Len(_) @@ -208,36 +222,36 @@ impl<'db> Filler<'db> { } } -pub fn monomorphized_mir_body_query<'db>( - db: &'db dyn HirDatabase, +pub fn monomorphized_mir_body_query( + db: &dyn HirDatabase, owner: DefWithBodyId, - subst: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, -) -> Result>, MirLowerError<'db>> { - let mut filler = Filler::new(db, trait_env, subst); + subst: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, +) -> Result, MirLowerError> { + let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref()); let body = db.mir_body(owner)?; let mut body = (*body).clone(); filler.fill_body(&mut body)?; Ok(Arc::new(body)) } -pub(crate) fn monomorphized_mir_body_cycle_result<'db>( - _db: &'db dyn HirDatabase, +pub(crate) fn monomorphized_mir_body_cycle_result( + _db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId, - _: GenericArgs<'db>, - _: ParamEnvAndCrate<'db>, -) -> Result>, MirLowerError<'db>> { + _: StoredGenericArgs, + _: StoredParamEnvAndCrate, +) -> Result, MirLowerError> { Err(MirLowerError::Loop) } -pub fn monomorphized_mir_body_for_closure_query<'db>( - db: &'db dyn HirDatabase, +pub fn monomorphized_mir_body_for_closure_query( + db: &dyn HirDatabase, closure: InternedClosureId, - subst: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, -) -> Result>, MirLowerError<'db>> { - let mut filler = Filler::new(db, trait_env, subst); + subst: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, +) -> Result, MirLowerError> { + let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref()); let body = db.mir_body_for_closure(closure)?; let mut body = (*body).clone(); filler.fill_body(&mut body)?; diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index 0c5a64935e49..96b90a3f4074 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -36,8 +36,8 @@ macro_rules! 
wln { }; } -impl<'db> MirBody<'db> { - pub fn pretty_print(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> String { +impl MirBody { + pub fn pretty_print(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> String { let hir_body = db.body(self.owner); let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target); ctx.for_body(|this| match ctx.body.owner { @@ -80,7 +80,7 @@ impl<'db> MirBody<'db> { // String with lines is rendered poorly in `dbg` macros, which I use very much, so this // function exists to solve that. - pub fn dbg(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> impl Debug { + pub fn dbg(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> impl Debug { struct StringDbg(String); impl Debug for StringDbg { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -92,12 +92,12 @@ impl<'db> MirBody<'db> { } struct MirPrettyCtx<'a, 'db> { - body: &'a MirBody<'db>, + body: &'a MirBody, hir_body: &'a Body, db: &'db dyn HirDatabase, result: String, indent: String, - local_to_binding: ArenaMap, BindingId>, + local_to_binding: ArenaMap, display_target: DisplayTarget, } @@ -113,12 +113,12 @@ impl Write for MirPrettyCtx<'_, '_> { } } -enum LocalName<'db> { - Unknown(LocalId<'db>), - Binding(Name, LocalId<'db>), +enum LocalName { + Unknown(LocalId), + Binding(Name, LocalId), } -impl<'db> HirDisplay<'db> for LocalName<'db> { +impl<'db> HirDisplay<'db> for LocalName { fn hir_fmt( &self, f: &mut crate::display::HirFormatter<'_, 'db>, @@ -179,7 +179,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } fn new( - body: &'a MirBody<'db>, + body: &'a MirBody, hir_body: &'a Body, db: &'db dyn HirDatabase, display_target: DisplayTarget, @@ -211,19 +211,19 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { self, "let {}: {};", self.local_name(id).display_test(self.db, self.display_target), - self.hir_display(&local.ty) + self.hir_display(&local.ty.as_ref()) ); } } - fn local_name(&self, local: LocalId<'db>) -> LocalName<'db> { + fn local_name(&self, local: LocalId) -> LocalName { match self.local_to_binding.get(local) { Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local), None => LocalName::Unknown(local), } } - fn basic_block_id(&self, basic_block_id: BasicBlockId<'db>) -> String { + fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String { format!("'bb{}", u32::from(basic_block_id.into_raw())) } @@ -311,12 +311,8 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } } - fn place(&mut self, p: &Place<'db>) { - fn f<'db>( - this: &mut MirPrettyCtx<'_, 'db>, - local: LocalId<'db>, - projections: &[PlaceElem<'db>], - ) { + fn place(&mut self, p: &Place) { + fn f<'db>(this: &mut MirPrettyCtx<'_, 'db>, local: LocalId, projections: &[PlaceElem]) { let Some((last, head)) = projections.split_last() else { // no projection w!(this, "{}", this.local_name(local).display_test(this.db, this.display_target)); @@ -376,19 +372,21 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { f(self, p.local, p.projection.lookup(&self.body.projection_store)); } - fn operand(&mut self, r: &Operand<'db>) { + fn operand(&mut self, r: &Operand) { match &r.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { // MIR at the time of writing doesn't have difference between move and copy, so we show them // equally. Feel free to change it. self.place(p); } - OperandKind::Constant { konst, .. } => w!(self, "Const({})", self.hir_display(konst)), + OperandKind::Constant { konst, .. 
} => { + w!(self, "Const({})", self.hir_display(&konst.as_ref())) + } OperandKind::Static(s) => w!(self, "Static({:?})", s), } } - fn rvalue(&mut self, r: &Rvalue<'db>) { + fn rvalue(&mut self, r: &Rvalue) { match r { Rvalue::Use(op) => self.operand(op), Rvalue::Ref(r, p) => { @@ -415,7 +413,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { Rvalue::Repeat(op, len) => { w!(self, "["); self.operand(op); - w!(self, "; {}]", len.display_test(self.db, self.display_target)); + w!(self, "; {}]", len.as_ref().display_test(self.db, self.display_target)); } Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => { w!(self, "Adt("); @@ -440,7 +438,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { Rvalue::Cast(ck, op, ty) => { w!(self, "Cast({ck:?}, "); self.operand(op); - w!(self, ", {})", self.hir_display(ty)); + w!(self, ", {})", self.hir_display(&ty.as_ref())); } Rvalue::CheckedBinaryOp(b, o1, o2) => { self.operand(o1); @@ -478,7 +476,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } } - fn operand_list(&mut self, it: &[Operand<'db>]) { + fn operand_list(&mut self, it: &[Operand]) { let mut it = it.iter(); if let Some(first) = it.next() { self.operand(first); diff --git a/crates/hir-ty/src/next_solver.rs b/crates/hir-ty/src/next_solver.rs index 8c52a847d1e9..539d09a8f5df 100644 --- a/crates/hir-ty/src/next_solver.rs +++ b/crates/hir-ty/src/next_solver.rs @@ -1,6 +1,7 @@ //! Things relevant to the next trait solver. pub mod abi; +mod binder; mod consts; mod def_id; pub mod fold; @@ -21,6 +22,7 @@ mod structural_normalize; mod ty; pub mod util; +pub use binder::*; pub use consts::*; pub use def_id::*; pub use generic_arg::*; diff --git a/crates/hir-ty/src/next_solver/binder.rs b/crates/hir-ty/src/next_solver/binder.rs new file mode 100644 index 000000000000..3645f8096cfd --- /dev/null +++ b/crates/hir-ty/src/next_solver/binder.rs @@ -0,0 +1,83 @@ +use crate::{ + FnAbi, + next_solver::{ + Binder, Clauses, EarlyBinder, FnSig, PolyFnSig, StoredBoundVarKinds, StoredClauses, + StoredTy, StoredTys, Ty, abi::Safety, + }, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StoredEarlyBinder(T); + +impl StoredEarlyBinder { + #[inline] + pub fn bind(value: T) -> Self { + Self(value) + } + + #[inline] + pub fn skip_binder(self) -> T { + self.0 + } + + #[inline] + pub fn as_ref(&self) -> StoredEarlyBinder<&T> { + StoredEarlyBinder(&self.0) + } + + #[inline] + pub fn get_with<'db, 'a, R>(&'a self, f: impl FnOnce(&'a T) -> R) -> EarlyBinder<'db, R> { + EarlyBinder::bind(f(&self.0)) + } +} + +impl StoredEarlyBinder { + #[inline] + pub fn get<'db>(&self) -> EarlyBinder<'db, Ty<'db>> { + self.get_with(|it| it.as_ref()) + } +} + +impl StoredEarlyBinder { + #[inline] + pub fn get<'db>(&self) -> EarlyBinder<'db, Clauses<'db>> { + self.get_with(|it| it.as_ref()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct StoredPolyFnSig { + bound_vars: StoredBoundVarKinds, + inputs_and_output: StoredTys, + c_variadic: bool, + safety: Safety, + abi: FnAbi, +} + +impl StoredPolyFnSig { + #[inline] + pub fn new(sig: PolyFnSig<'_>) -> Self { + let bound_vars = sig.bound_vars().store(); + let sig = sig.skip_binder(); + Self { + bound_vars, + inputs_and_output: sig.inputs_and_output.store(), + c_variadic: sig.c_variadic, + safety: sig.safety, + abi: sig.abi, + } + } + + #[inline] + pub fn get(&self) -> PolyFnSig<'_> { + Binder::bind_with_vars( + FnSig { + inputs_and_output: self.inputs_and_output.as_ref(), + c_variadic: self.c_variadic, + safety: self.safety, + abi: self.abi, + }, + self.bound_vars.as_ref(), + ) + } +} diff 
--git a/crates/hir-ty/src/next_solver/consts.rs b/crates/hir-ty/src/next_solver/consts.rs index 3a32cf8b1fef..8417e145a46d 100644 --- a/crates/hir-ty/src/next_solver/consts.rs +++ b/crates/hir-ty/src/next_solver/consts.rs @@ -3,19 +3,20 @@ use std::hash::Hash; use hir_def::ConstParamId; -use macros::{TypeFoldable, TypeVisitable}; +use intern::{Interned, InternedRef, impl_internable}; +use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable}; use rustc_ast_ir::visit::VisitorResult; use rustc_type_ir::{ - BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, InferConst, - TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, - WithCachedTypeInfo, + BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, + GenericTypeVisitable, InferConst, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, + TypeVisitable, TypeVisitableExt, WithCachedTypeInfo, inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike}, relate::Relate, }; use crate::{ MemoryMap, - next_solver::{ClauseKind, ParamEnv, interner::InternedWrapperNoDebug}, + next_solver::{ClauseKind, ParamEnv, impl_stored_interned}, }; use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty}; @@ -23,30 +24,43 @@ use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, pub type ConstKind<'db> = rustc_type_ir::ConstKind>; pub type UnevaluatedConst<'db> = rustc_type_ir::UnevaluatedConst>; -#[salsa::interned(constructor = new_, unsafe(non_update_types))] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Const<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>>, + pub(super) interned: InternedRef<'db, ConstInterned>, } +#[derive(PartialEq, Eq, Hash)] +#[repr(align(4))] // Required for `GenericArg` bit-tagging. +pub(super) struct ConstInterned(WithCachedTypeInfo>); + +impl_internable!(gc; ConstInterned); +impl_stored_interned!(ConstInterned, Const, StoredConst); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + impl<'db> Const<'db> { - pub fn new(interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self { + pub fn new(_interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, ConstKind<'static>>(kind) }; let flags = FlagComputation::for_const_kind(&kind); let cached = WithCachedTypeInfo { internee: kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; - Const::new_(interner.db(), InternedWrapperNoDebug(cached)) + Self { interned: Interned::new_gc(ConstInterned(cached)) } } pub fn inner(&self) -> &WithCachedTypeInfo> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::< + &WithCachedTypeInfo>, + &WithCachedTypeInfo>, + >(inner) + } } pub fn error(interner: DbInterner<'db>) -> Self { @@ -106,12 +120,6 @@ impl<'db> std::fmt::Debug for Const<'db> { } } -impl<'db> std::fmt::Debug for InternedWrapperNoDebug>> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.internee.fmt(f) - } -} - pub type PlaceholderConst = Placeholder; #[derive(Copy, Clone, Hash, Eq, PartialEq)] @@ -164,7 +172,9 @@ impl ParamConst { /// A type-level constant value. /// /// Represents a typed, fully evaluated constant. 
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable)] +#[derive( + Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable, GenericTypeVisitable, +)] pub struct ValueConst<'db> { pub ty: Ty<'db>, // FIXME: Should we ignore this for TypeVisitable, TypeFoldable? @@ -190,7 +200,7 @@ impl<'db> rustc_type_ir::inherent::ValueConst> for ValueConst<'d } } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, GenericTypeVisitable)] pub struct ConstBytes<'db> { pub memory: Box<[u8]>, pub memory_map: MemoryMap<'db>, @@ -202,31 +212,53 @@ impl Hash for ConstBytes<'_> { } } -#[salsa::interned(constructor = new_, debug, unsafe(non_update_types))] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Valtree<'db> { - #[returns(ref)] - bytes_: ConstBytes<'db>, + interned: InternedRef<'db, ValtreeInterned>, +} + +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Valtree<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned(self.interned); + self.inner().generic_visit_with(visitor); + } } +#[derive(Debug, PartialEq, Eq, Hash)] +struct ValtreeInterned { + bytes: ConstBytes<'static>, +} + +impl_internable!(gc; ValtreeInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + impl<'db> Valtree<'db> { + #[inline] pub fn new(bytes: ConstBytes<'db>) -> Self { - crate::with_attached_db(|db| unsafe { - // SAFETY: ¯\_(ツ)_/¯ - std::mem::transmute(Valtree::new_(db, bytes)) - }) + let bytes = unsafe { std::mem::transmute::, ConstBytes<'static>>(bytes) }; + Self { interned: Interned::new_gc(ValtreeInterned { bytes }) } } + #[inline] pub fn inner(&self) -> &ConstBytes<'db> { - crate::with_attached_db(|db| { - let inner = self.bytes_(db); - // SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. 
- unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.bytes; + unsafe { std::mem::transmute::<&ConstBytes<'static>, &ConstBytes<'db>>(inner) } + } +} + +impl std::fmt::Debug for Valtree<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.interned.fmt(f) } } -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)] +#[derive( + Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable, +)] pub struct ExprConst; impl rustc_type_ir::inherent::ParamLike for ParamConst { @@ -243,6 +275,13 @@ impl<'db> IntoKind for Const<'db> { } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Const<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned(self.interned); + self.kind().generic_visit_with(visitor); + } +} + impl<'db> TypeVisitable> for Const<'db> { fn visit_with>>( &self, diff --git a/crates/hir-ty/src/next_solver/fulfill.rs b/crates/hir-ty/src/next_solver/fulfill.rs index 40cc84e0c0ed..4a19e30c64f7 100644 --- a/crates/hir-ty/src/next_solver/fulfill.rs +++ b/crates/hir-ty/src/next_solver/fulfill.rs @@ -249,7 +249,7 @@ impl<'db> FulfillmentCtxt<'db> { | TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ } | TypingMode::PostAnalysis => return Default::default(), }; - let stalled_coroutines = stalled_coroutines.inner(); + let stalled_coroutines = stalled_coroutines.as_slice(); if stalled_coroutines.is_empty() { return Default::default(); diff --git a/crates/hir-ty/src/next_solver/fulfill/errors.rs b/crates/hir-ty/src/next_solver/fulfill/errors.rs index 8495af4b755e..8f798b4ade24 100644 --- a/crates/hir-ty/src/next_solver/fulfill/errors.rs +++ b/crates/hir-ty/src/next_solver/fulfill/errors.rs @@ -9,7 +9,7 @@ use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt}; use rustc_type_ir::{ AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity, error::ExpectedFound, - inherent::{IntoKind, SliceLike, Span as _}, + inherent::{IntoKind, Span as _}, lang_items::SolverTraitLangItem, solve::{Certainty, GoalSource, MaybeCause, NoSolution}, }; diff --git a/crates/hir-ty/src/next_solver/generic_arg.rs b/crates/hir-ty/src/next_solver/generic_arg.rs index 10f2ba2b119b..f31b487eaed5 100644 --- a/crates/hir-ty/src/next_solver/generic_arg.rs +++ b/crates/hir-ty/src/next_solver/generic_arg.rs @@ -1,41 +1,209 @@ //! Things related to generic args in the next-trait-solver. 
+use std::{hint::unreachable_unchecked, marker::PhantomData, ptr::NonNull}; + use hir_def::{GenericDefId, GenericParamId}; -use macros::{TypeFoldable, TypeVisitable}; +use intern::InternedRef; use rustc_type_ir::{ - ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSigTys, - GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance, + ClosureArgs, ConstVid, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder, FnSigTys, + GenericTypeVisitable, Interner, TyKind, TyVid, TypeFoldable, TypeFolder, TypeVisitable, + TypeVisitor, Variance, inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _}, relate::{Relate, VarianceDiagInfo}, walk::TypeWalker, }; use smallvec::SmallVec; -use crate::next_solver::{PolyFnSig, interned_vec_db}; +use crate::next_solver::{ + ConstInterned, PolyFnSig, RegionInterned, TyInterned, impl_foldable_for_interned_slice, + impl_stored_interned_slice, interned_slice, +}; use super::{ Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys, generics::Generics, }; -#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, salsa::Supertype)] -pub enum GenericArg<'db> { - Ty(Ty<'db>), - Lifetime(Region<'db>), - Const(Const<'db>), +pub type GenericArgKind<'db> = rustc_type_ir::GenericArgKind>; +pub type TermKind<'db> = rustc_type_ir::TermKind>; + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +struct GenericArgImpl<'db> { + ptr: NonNull<()>, + _marker: PhantomData<(Ty<'db>, Const<'db>, Region<'db>)>, +} + +unsafe impl Send for GenericArgImpl<'_> {} +unsafe impl Sync for GenericArgImpl<'_> {} + +impl<'db> GenericArgImpl<'db> { + const KIND_MASK: usize = 0b11; + const PTR_MASK: usize = !Self::KIND_MASK; + const TY_TAG: usize = 0b00; + const CONST_TAG: usize = 0b01; + const REGION_TAG: usize = 0b10; + + #[inline] + fn new_ty(ty: Ty<'db>) -> Self { + Self { + ptr: unsafe { + NonNull::new_unchecked( + ty.interned + .as_raw() + .cast::<()>() + .cast_mut() + .map_addr(|addr| addr | Self::TY_TAG), + ) + }, + _marker: PhantomData, + } + } + + #[inline] + fn new_const(ty: Const<'db>) -> Self { + Self { + ptr: unsafe { + NonNull::new_unchecked( + ty.interned + .as_raw() + .cast::<()>() + .cast_mut() + .map_addr(|addr| addr | Self::CONST_TAG), + ) + }, + _marker: PhantomData, + } + } + + #[inline] + fn new_region(ty: Region<'db>) -> Self { + Self { + ptr: unsafe { + NonNull::new_unchecked( + ty.interned + .as_raw() + .cast::<()>() + .cast_mut() + .map_addr(|addr| addr | Self::REGION_TAG), + ) + }, + _marker: PhantomData, + } + } + + #[inline] + fn kind(self) -> GenericArgKind<'db> { + let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK); + unsafe { + match self.ptr.addr().get() & Self::KIND_MASK { + Self::TY_TAG => GenericArgKind::Type(Ty { + interned: InternedRef::from_raw(ptr.cast::()), + }), + Self::CONST_TAG => GenericArgKind::Const(Const { + interned: InternedRef::from_raw(ptr.cast::()), + }), + Self::REGION_TAG => GenericArgKind::Lifetime(Region { + interned: InternedRef::from_raw(ptr.cast::()), + }), + _ => unreachable_unchecked(), + } + } + } + + #[inline] + fn term_kind(self) -> TermKind<'db> { + let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK); + unsafe { + match self.ptr.addr().get() & Self::KIND_MASK { + Self::TY_TAG => { + TermKind::Ty(Ty { interned: InternedRef::from_raw(ptr.cast::()) }) + } + Self::CONST_TAG => TermKind::Const(Const { + interned: InternedRef::from_raw(ptr.cast::()), + }), + _ => unreachable_unchecked(), + } + } + } +} + 
+#[derive(PartialEq, Eq, Hash)] +pub struct StoredGenericArg { + ptr: GenericArgImpl<'static>, +} + +impl Clone for StoredGenericArg { + #[inline] + fn clone(&self) -> Self { + match self.ptr.kind() { + GenericArgKind::Lifetime(it) => std::mem::forget(it.interned.to_owned()), + GenericArgKind::Type(it) => std::mem::forget(it.interned.to_owned()), + GenericArgKind::Const(it) => std::mem::forget(it.interned.to_owned()), + } + Self { ptr: self.ptr } + } +} + +impl Drop for StoredGenericArg { + #[inline] + fn drop(&mut self) { + unsafe { + match self.ptr.kind() { + GenericArgKind::Lifetime(it) => it.interned.decrement_refcount(), + GenericArgKind::Type(it) => it.interned.decrement_refcount(), + GenericArgKind::Const(it) => it.interned.decrement_refcount(), + } + } + } +} + +impl StoredGenericArg { + #[inline] + fn new(value: GenericArg<'_>) -> Self { + let result = Self { ptr: GenericArgImpl { ptr: value.ptr.ptr, _marker: PhantomData } }; + // Increase refcount. + std::mem::forget(result.clone()); + result + } + + #[inline] + pub fn as_ref<'db>(&self) -> GenericArg<'db> { + GenericArg { ptr: self.ptr } + } +} + +impl std::fmt::Debug for StoredGenericArg { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_ref().fmt(f) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct GenericArg<'db> { + ptr: GenericArgImpl<'db>, } impl<'db> std::fmt::Debug for GenericArg<'db> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Ty(t) => std::fmt::Debug::fmt(t, f), - Self::Lifetime(r) => std::fmt::Debug::fmt(r, f), - Self::Const(c) => std::fmt::Debug::fmt(c, f), + match self.kind() { + GenericArgKind::Type(t) => std::fmt::Debug::fmt(&t, f), + GenericArgKind::Lifetime(r) => std::fmt::Debug::fmt(&r, f), + GenericArgKind::Const(c) => std::fmt::Debug::fmt(&c, f), } } } impl<'db> GenericArg<'db> { + #[inline] + pub fn store(self) -> StoredGenericArg { + StoredGenericArg::new(self) + } + + #[inline] + pub fn kind(self) -> GenericArgKind<'db> { + self.ptr.kind() + } + pub fn ty(self) -> Option> { match self.kind() { GenericArgKind::Type(ty) => Some(ty), @@ -66,8 +234,8 @@ impl<'db> GenericArg<'db> { #[inline] pub(crate) fn expect_region(self) -> Region<'db> { - match self { - GenericArg::Lifetime(region) => region, + match self.kind() { + GenericArgKind::Lifetime(region) => region, _ => panic!("expected a region, got {self:?}"), } } @@ -87,30 +255,32 @@ impl<'db> GenericArg<'db> { } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Term<'db>) -> Self { - match value { - Term::Ty(ty) => GenericArg::Ty(ty), - Term::Const(c) => GenericArg::Const(c), - } + GenericArg { ptr: value.ptr } } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] -pub enum Term<'db> { - Ty(Ty<'db>), - Const(Const<'db>), +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Term<'db> { + ptr: GenericArgImpl<'db>, } impl<'db> std::fmt::Debug for Term<'db> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Ty(t) => std::fmt::Debug::fmt(t, f), - Self::Const(c) => std::fmt::Debug::fmt(c, f), + match self.kind() { + TermKind::Ty(t) => std::fmt::Debug::fmt(&t, f), + TermKind::Const(c) => std::fmt::Debug::fmt(&c, f), } } } impl<'db> Term<'db> { + #[inline] + pub fn kind(self) -> TermKind<'db> { + self.ptr.term_kind() + } + pub fn expect_type(&self) -> Ty<'db> { self.as_type().expect("expected a type, but found a const") } @@ -124,31 +294,108 @@ impl<'db> Term<'db> { } impl<'db> From> 
for GenericArg<'db> { + #[inline] fn from(value: Ty<'db>) -> Self { - Self::Ty(value) + GenericArg { ptr: GenericArgImpl::new_ty(value) } } } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Region<'db>) -> Self { - Self::Lifetime(value) + GenericArg { ptr: GenericArgImpl::new_region(value) } } } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Const<'db>) -> Self { - Self::Const(value) + GenericArg { ptr: GenericArgImpl::new_const(value) } } } impl<'db> IntoKind for GenericArg<'db> { - type Kind = GenericArgKind>; + type Kind = GenericArgKind<'db>; + #[inline] fn kind(self) -> Self::Kind { - match self { - GenericArg::Ty(ty) => GenericArgKind::Type(ty), - GenericArg::Lifetime(region) => GenericArgKind::Lifetime(region), - GenericArg::Const(c) => GenericArgKind::Const(c), + self.ptr.kind() + } +} + +impl<'db, V> GenericTypeVisitable for GenericArg<'db> +where + GenericArgKind<'db>: GenericTypeVisitable, +{ + fn generic_visit_with(&self, visitor: &mut V) { + self.kind().generic_visit_with(visitor); + } +} + +impl<'db, V> GenericTypeVisitable for Term<'db> +where + TermKind<'db>: GenericTypeVisitable, +{ + fn generic_visit_with(&self, visitor: &mut V) { + self.kind().generic_visit_with(visitor); + } +} + +impl<'db> TypeVisitable> for GenericArg<'db> { + fn visit_with>>(&self, visitor: &mut V) -> V::Result { + match self.kind() { + GenericArgKind::Lifetime(it) => it.visit_with(visitor), + GenericArgKind::Type(it) => it.visit_with(visitor), + GenericArgKind::Const(it) => it.visit_with(visitor), + } + } +} + +impl<'db> TypeVisitable> for Term<'db> { + fn visit_with>>(&self, visitor: &mut V) -> V::Result { + match self.kind() { + TermKind::Ty(it) => it.visit_with(visitor), + TermKind::Const(it) => it.visit_with(visitor), + } + } +} + +impl<'db> TypeFoldable> for GenericArg<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(match self.kind() { + GenericArgKind::Lifetime(it) => it.try_fold_with(folder)?.into(), + GenericArgKind::Type(it) => it.try_fold_with(folder)?.into(), + GenericArgKind::Const(it) => it.try_fold_with(folder)?.into(), + }) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + match self.kind() { + GenericArgKind::Lifetime(it) => it.fold_with(folder).into(), + GenericArgKind::Type(it) => it.fold_with(folder).into(), + GenericArgKind::Const(it) => it.fold_with(folder).into(), + } + } +} + +impl<'db> TypeFoldable> for Term<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(match self.kind() { + TermKind::Ty(it) => it.try_fold_with(folder)?.into(), + TermKind::Const(it) => it.try_fold_with(folder)?.into(), + }) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + match self.kind() { + TermKind::Ty(it) => it.fold_with(folder).into(), + TermKind::Const(it) => it.fold_with(folder).into(), } } } @@ -182,7 +429,9 @@ impl<'db> Relate> for GenericArg<'db> { } } -interned_vec_db!(GenericArgs, GenericArg); +interned_slice!(GenericArgsStorage, GenericArgs, GenericArg<'db>, GenericArg<'static>,); +impl_foldable_for_interned_slice!(GenericArgs); +impl_stored_interned_slice!(GenericArgsStorage, GenericArgs, StoredGenericArgs); impl<'db> rustc_type_ir::inherent::GenericArg> for GenericArg<'db> {} @@ -306,7 +555,7 @@ impl<'db> GenericArgs<'db> { /// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple. 
pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts> { // FIXME: should use `ClosureSubst` when possible - match self.inner().as_slice() { + match self.as_slice() { [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => { let interner = DbInterner::conjure(); rustc_type_ir::ClosureArgsParts { @@ -341,8 +590,8 @@ impl<'db> rustc_type_ir::relate::Relate> for GenericArgs<'db> { a: Self, b: Self, ) -> rustc_type_ir::relate::RelateResult, Self> { - let interner = relation.cx(); - CollectAndApply::collect_and_apply( + GenericArgs::new_from_iter( + relation.cx(), std::iter::zip(a.iter(), b.iter()).map(|(a, b)| { relation.relate_with_variance( Variance::Invariant, @@ -351,7 +600,6 @@ impl<'db> rustc_type_ir::relate::Relate> for GenericArgs<'db> { b, ) }), - |g| GenericArgs::new_from_iter(interner, g.iter().cloned()), ) } } @@ -397,29 +645,26 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< }) } fn type_at(self, i: usize) -> as rustc_type_ir::Interner>::Ty { - self.inner() - .get(i) + self.get(i) .and_then(|g| g.as_type()) .unwrap_or_else(|| Ty::new_error(DbInterner::conjure(), ErrorGuaranteed)) } fn region_at(self, i: usize) -> as rustc_type_ir::Interner>::Region { - self.inner() - .get(i) + self.get(i) .and_then(|g| g.as_region()) .unwrap_or_else(|| Region::error(DbInterner::conjure())) } fn const_at(self, i: usize) -> as rustc_type_ir::Interner>::Const { - self.inner() - .get(i) + self.get(i) .and_then(|g| g.as_const()) .unwrap_or_else(|| Const::error(DbInterner::conjure())) } fn split_closure_args(self) -> rustc_type_ir::ClosureArgsParts> { // FIXME: should use `ClosureSubst` when possible - match self.inner().as_slice() { + match self.as_slice() { [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => { let interner = DbInterner::conjure(); // This is stupid, but the next solver expects the first input to actually be a tuple @@ -458,7 +703,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< fn split_coroutine_closure_args( self, ) -> rustc_type_ir::CoroutineClosureArgsParts> { - match self.inner().as_slice() { + match self.as_slice() { [ parent_args @ .., closure_kind_ty, @@ -481,7 +726,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< fn split_coroutine_args(self) -> rustc_type_ir::CoroutineArgsParts> { let interner = DbInterner::conjure(); - match self.inner().as_slice() { + match self.as_slice() { [parent_args @ .., kind_ty, resume_ty, yield_ty, return_ty, tupled_upvars_ty] => { rustc_type_ir::CoroutineArgsParts { parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), @@ -518,25 +763,25 @@ pub fn error_for_param_kind<'db>(id: GenericParamId, interner: DbInterner<'db>) } impl<'db> IntoKind for Term<'db> { - type Kind = TermKind>; + type Kind = TermKind<'db>; + #[inline] fn kind(self) -> Self::Kind { - match self { - Term::Ty(ty) => TermKind::Ty(ty), - Term::Const(c) => TermKind::Const(c), - } + self.ptr.term_kind() } } impl<'db> From> for Term<'db> { + #[inline] fn from(value: Ty<'db>) -> Self { - Self::Ty(value) + Term { ptr: GenericArgImpl::new_ty(value) } } } impl<'db> From> for Term<'db> { + #[inline] fn from(value: Const<'db>) -> Self { - Self::Const(value) + Term { ptr: GenericArgImpl::new_const(value) } } } @@ -583,7 +828,7 @@ impl From for TermVid { impl<'db> DbInterner<'db> { pub(super) fn mk_args(self, args: &[GenericArg<'db>]) -> GenericArgs<'db> { - GenericArgs::new_from_iter(self, args.iter().cloned()) + GenericArgs::new_from_slice(args) } pub(super) fn 
mk_args_from_iter(self, iter: I) -> T::Output diff --git a/crates/hir-ty/src/next_solver/infer/at.rs b/crates/hir-ty/src/next_solver/infer/at.rs index 70b659406f86..6ba1aae2d486 100644 --- a/crates/hir-ty/src/next_solver/infer/at.rs +++ b/crates/hir-ty/src/next_solver/infer/at.rs @@ -28,7 +28,7 @@ use rustc_type_ir::{ FnSig, GenericArgKind, TypeFoldable, TypingMode, Variance, error::ExpectedFound, - inherent::{IntoKind, Span as _}, + inherent::Span as _, relate::{Relate, TypeRelation, solver_relating::RelateExt}, }; diff --git a/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs index 1029a7ff39e8..4422eeaa9a4b 100644 --- a/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs +++ b/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs @@ -8,7 +8,7 @@ use rustc_hash::FxHashMap; use rustc_index::Idx; use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar}; -use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, Ty as _}; use rustc_type_ir::{ BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, @@ -498,7 +498,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> { { let base = Canonical { max_universe: UniverseIndex::ROOT, - variables: CanonicalVars::new_from_iter(tcx, []), + variables: CanonicalVars::empty(tcx), value: (), }; Canonicalizer::canonicalize_with_base( diff --git a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs index 13c620cfdbc9..b758042e85b0 100644 --- a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs +++ b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs @@ -23,7 +23,7 @@ use rustc_index::{Idx as _, IndexVec}; use rustc_type_ir::{ BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, - inherent::{GenericArg as _, IntoKind, SliceLike}, + inherent::{GenericArg as _, IntoKind}, }; use tracing::{debug, instrument}; diff --git a/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs b/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs index befb2001b1b9..617dbda1338b 100644 --- a/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs +++ b/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs @@ -3,7 +3,7 @@ use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt}; use tracing::{debug, instrument}; use crate::next_solver::{ - ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty, + ArgOutlivesPredicate, GenericArgKind, Region, RegionOutlivesPredicate, Ty, infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog}, }; @@ -12,14 +12,14 @@ impl<'db> InferCtxt<'db> { &self, OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>, ) { - match arg { - GenericArg::Lifetime(r1) => { + match arg.kind() { + GenericArgKind::Lifetime(r1) => { self.register_region_outlives_constraint(OutlivesPredicate(r1, r2)); } - GenericArg::Ty(ty1) => { + GenericArgKind::Type(ty1) => { self.register_type_outlives_constraint(ty1, r2); } - GenericArg::Const(_) => unreachable!(), + GenericArgKind::Const(_) => unreachable!(), } } diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs index 4d3c028879a1..1a946cb90817 100644 --- 
a/crates/hir-ty/src/next_solver/interner.rs +++ b/crates/hir-ty/src/next_solver/interner.rs @@ -2,6 +2,7 @@ use std::fmt; +use intern::{Interned, InternedRef, InternedSliceRef, impl_internable}; use rustc_ast_ir::{FloatTy, IntTy, UintTy}; pub use tls_cache::clear_tls_solver_cache; pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db}; @@ -57,216 +58,244 @@ use super::{ util::sizedness_constraint_for_ty, }; -#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] -pub struct InternedWrapperNoDebug(pub(crate) T); +macro_rules! interned_slice { + ($storage:ident, $name:ident, $ty_db:ty, $ty_static:ty $(,)?) => { + const _: () = { + #[allow(unused_lifetimes)] + fn _ensure_correct_types<'db: 'static>(v: $ty_db) -> $ty_static { v } + }; -#[macro_export] -#[doc(hidden)] -macro_rules! _interned_vec_nolifetime_salsa { - ($name:ident, $ty:ty) => { - interned_vec_nolifetime_salsa!($name, $ty, nofold); + ::intern::impl_slice_internable!(gc; $storage, (), $ty_static); - impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; - Ok($name::new_(folder.cx().db(), inner)) - } - fn fold_with>>( - self, - folder: &mut F, - ) -> Self { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.fold_with(folder)).collect(); - $name::new_(folder.cx().db(), inner) - } + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + pub struct $name<'db> { + interned: ::intern::InternedSliceRef<'db, $storage>, } - impl<'db> rustc_type_ir::TypeVisitable> for $name<'db> { - fn visit_with>>( - &self, - visitor: &mut V, - ) -> V::Result { - use rustc_ast_ir::visit::VisitorResult; - use rustc_type_ir::inherent::SliceLike as _; - rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); - V::Result::output() + impl<'db> std::fmt::Debug for $name<'db> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_slice().fmt(fmt) } } - }; - ($name:ident, $ty:ty, nofold) => { - #[salsa::interned(constructor = new_)] - pub struct $name { - #[returns(ref)] - inner_: smallvec::SmallVec<[$ty; 2]>, - } impl<'db> $name<'db> { - pub fn new_from_iter( - interner: DbInterner<'db>, - data: impl IntoIterator, - ) -> Self { - $name::new_(interner.db(), data.into_iter().collect::>()) + #[inline] + pub fn empty(_interner: DbInterner<'db>) -> Self { + // FIXME: Get from a static. 
+ Self::new_from_slice(&[]) + } + + #[inline] + pub fn new_from_slice(slice: &[$ty_db]) -> Self { + let slice = unsafe { ::std::mem::transmute::<&[$ty_db], &[$ty_static]>(slice) }; + Self { interned: ::intern::InternedSlice::from_header_and_slice((), slice) } } - pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> { - // SAFETY: ¯\_(ツ)_/¯ - $crate::with_attached_db(|db| { - let inner = self.inner_(db); - unsafe { std::mem::transmute(inner) } + #[inline] + pub fn new_from_iter(_interner: DbInterner<'db>, args: I) -> T::Output + where + I: IntoIterator, + T: ::rustc_type_ir::CollectAndApply<$ty_db, Self>, + { + ::rustc_type_ir::CollectAndApply::collect_and_apply(args.into_iter(), |g| { + Self::new_from_slice(g) }) } + + #[inline] + pub fn as_slice(self) -> &'db [$ty_db] { + let slice = &self.interned.get().slice; + unsafe { ::std::mem::transmute::<&[$ty_static], &[$ty_db]>(slice) } + } + + #[inline] + pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>> { + self.as_slice().iter().copied() + } + + #[inline] + pub fn len(self) -> usize { + self.as_slice().len() + } + + #[inline] + pub fn is_empty(self) -> bool { + self.as_slice().is_empty() + } } - impl<'db> std::fmt::Debug for $name<'db> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.as_slice().fmt(fmt) + impl<'db> IntoIterator for $name<'db> { + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>; + type Item = $ty_db; + #[inline] + fn into_iter(self) -> Self::IntoIter { self.iter() } + } + + impl<'db> ::std::ops::Deref for $name<'db> { + type Target = [$ty_db]; + + #[inline] + fn deref(&self) -> &Self::Target { + (*self).as_slice() } } impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> { - type Item = $ty; + type Item = $ty_db; - type IntoIter = as IntoIterator>::IntoIter; + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>; + #[inline] fn iter(self) -> Self::IntoIter { - self.inner().clone().into_iter() + self.iter() } + #[inline] fn as_slice(&self) -> &[Self::Item] { - self.inner().as_slice() + (*self).as_slice() } } - impl<'db> IntoIterator for $name<'db> { - type Item = $ty; - type IntoIter = ::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - rustc_type_ir::inherent::SliceLike::iter(self) + impl<'db> Default for $name<'db> { + #[inline] + fn default() -> Self { + $name::empty(DbInterner::conjure()) } } - impl<'db> Default for $name<'db> { - fn default() -> Self { - $name::new_from_iter(DbInterner::conjure(), []) + + impl<'db, V: $crate::next_solver::interner::WorldExposer> + rustc_type_ir::GenericTypeVisitable for $name<'db> + { + #[inline] + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned_slice(self.interned); + self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); } } }; } +pub(crate) use interned_slice; -pub use crate::_interned_vec_nolifetime_salsa as interned_vec_nolifetime_salsa; - -#[macro_export] -#[doc(hidden)] -macro_rules! _interned_vec_db { - ($name:ident, $ty:ident) => { - interned_vec_db!($name, $ty, nofold); +macro_rules! 
impl_foldable_for_interned_slice { + ($name:ident) => { + impl<'db> ::rustc_type_ir::TypeVisitable> for $name<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + use rustc_ast_ir::visit::VisitorResult; + rustc_ast_ir::walk_visitable_list!(visitor, (*self).iter()); + V::Result::output() + } + } impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { fn try_fold_with>>( self, folder: &mut F, ) -> Result { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; - Ok($name::new_(folder.cx().db(), inner)) + Self::new_from_iter(folder.cx(), self.iter().map(|it| it.try_fold_with(folder))) } fn fold_with>>( self, folder: &mut F, ) -> Self { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.fold_with(folder)).collect(); - $name::new_(folder.cx().db(), inner) - } - } - - impl<'db> rustc_type_ir::TypeVisitable> for $name<'db> { - fn visit_with>>( - &self, - visitor: &mut V, - ) -> V::Result { - use rustc_ast_ir::visit::VisitorResult; - use rustc_type_ir::inherent::SliceLike as _; - rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); - V::Result::output() + Self::new_from_iter(folder.cx(), self.iter().map(|it| it.fold_with(folder))) } } }; - ($name:ident, $ty:ident, nofold) => { - #[salsa::interned(constructor = new_, unsafe(non_update_types))] - pub struct $name<'db> { - #[returns(ref)] - inner_: smallvec::SmallVec<[$ty<'db>; 2]>, +} +pub(crate) use impl_foldable_for_interned_slice; + +macro_rules! impl_stored_interned_slice { + ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => { + #[derive(Clone, PartialEq, Eq, Hash)] + pub struct $stored_name { + interned: ::intern::InternedSlice<$storage>, } - impl<'db> std::fmt::Debug for $name<'db> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.as_slice().fmt(fmt) + impl $stored_name { + #[inline] + fn new(it: $name<'_>) -> Self { + Self { interned: it.interned.to_owned() } } - } - impl<'db> $name<'db> { - pub fn empty(interner: DbInterner<'db>) -> Self { - $name::new_(interner.db(), smallvec::SmallVec::new()) + #[inline] + pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> { + let it = $name { interned: self.interned.as_ref() }; + unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) } } + } - pub fn new_from_iter( - interner: DbInterner<'db>, - data: impl IntoIterator>, - ) -> Self { - $name::new_(interner.db(), data.into_iter().collect::>()) + unsafe impl salsa::Update for $stored_name { + unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) } } + } - pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> { - // SAFETY: ¯\_(ツ)_/¯ - $crate::with_attached_db(|db| { - let inner = self.inner_(db); - unsafe { std::mem::transmute(inner) } - }) + impl std::fmt::Debug for $stored_name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_ref().fmt(f) } } - impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> { - type Item = $ty<'db>; + impl $name<'_> { + #[inline] + pub fn store(self) -> $stored_name { + $stored_name::new(self) + } + } + }; +} +pub(crate) use impl_stored_interned_slice; - type IntoIter = ; 2]> as IntoIterator>::IntoIter; +macro_rules! impl_stored_interned { + ( $storage:ident, $name:ident, $stored_name:ident $(,)? 
) => { + #[derive(Clone, PartialEq, Eq, Hash)] + pub struct $stored_name { + interned: ::intern::Interned<$storage>, + } - fn iter(self) -> Self::IntoIter { - self.inner().clone().into_iter() + impl $stored_name { + #[inline] + fn new(it: $name<'_>) -> Self { + Self { interned: it.interned.to_owned() } } - fn as_slice(&self) -> &[Self::Item] { - self.inner().as_slice() + #[inline] + pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> { + let it = $name { interned: self.interned.as_ref() }; + unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) } } } - impl<'db> IntoIterator for $name<'db> { - type Item = $ty<'db>; - type IntoIter = ::IntoIter; + unsafe impl salsa::Update for $stored_name { + unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) } + } + } - fn into_iter(self) -> Self::IntoIter { - rustc_type_ir::inherent::SliceLike::iter(self) + impl std::fmt::Debug for $stored_name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_ref().fmt(f) } } - impl<'db> Default for $name<'db> { - fn default() -> Self { - $name::new_from_iter(DbInterner::conjure(), []) + impl $name<'_> { + #[inline] + pub fn store(self) -> $stored_name { + $stored_name::new(self) } } }; } +pub(crate) use impl_stored_interned; -pub use crate::_interned_vec_db as interned_vec_db; +pub trait WorldExposer { + fn on_interned(&mut self, interned: InternedRef<'_, T>); + fn on_interned_slice(&mut self, interned: InternedSliceRef<'_, T>); +} #[derive(Debug, Copy, Clone)] pub struct DbInterner<'db> { @@ -333,7 +362,8 @@ impl<'db> inherent::Span> for Span { } } -interned_vec_nolifetime_salsa!(BoundVarKinds, BoundVarKind, nofold); +interned_slice!(BoundVarKindsStorage, BoundVarKinds, BoundVarKind, BoundVarKind); +impl_stored_interned_slice!(BoundVarKindsStorage, BoundVarKinds, StoredBoundVarKinds); #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub enum BoundVarKind { @@ -365,7 +395,12 @@ impl BoundVarKind { } } -interned_vec_db!(CanonicalVars, CanonicalVarKind, nofold); +interned_slice!( + CanonicalVarsStorage, + CanonicalVars, + CanonicalVarKind<'db>, + CanonicalVarKind<'static> +); pub struct DepNodeIndex; @@ -391,7 +426,8 @@ impl std::fmt::Debug for Placeholder { #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct AllocId; -interned_vec_nolifetime_salsa!(VariancesOf, Variance, nofold); +interned_slice!(VariancesOfStorage, VariancesOf, Variance, Variance); +impl_stored_interned_slice!(VariancesOfStorage, VariancesOf, StoredVariancesOf); #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct VariantIdx(usize); @@ -658,7 +694,7 @@ impl<'db> inherent::AdtDef> for AdtDef { let id: VariantId = struct_id.into(); let field_types = interner.db().field_types(id); - field_types.iter().last().map(|f| *f.1) + field_types.iter().last().map(|f| f.1.get()) } fn all_field_tys( @@ -668,7 +704,7 @@ impl<'db> inherent::AdtDef> for AdtDef { let db = interner.db(); // FIXME: this is disabled just to match the behavior with chalk right now let _field_tys = |id: VariantId| { - db.field_types(id).iter().map(|(_, ty)| ty.skip_binder()).collect::>() + db.field_types(id).iter().map(|(_, ty)| ty.get().skip_binder()).collect::>() }; let field_tys = |_id: VariantId| vec![]; let tys: Vec<_> = match self.inner().id { @@ -762,30 +798,36 @@ impl std::ops::Deref for UnsizingParams { pub type PatternKind<'db> = rustc_type_ir::PatternKind>; -#[salsa::interned(constructor = new_, debug, unsafe(non_update_types))] +#[derive(Clone, 
Copy, PartialEq, Eq, Hash)] pub struct Pattern<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>, + interned: InternedRef<'db, PatternInterned>, } -impl<'db> std::fmt::Debug for InternedWrapperNoDebug> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} +#[derive(PartialEq, Eq, Hash)] +struct PatternInterned(PatternKind<'static>); + +impl_internable!(gc; PatternInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; impl<'db> Pattern<'db> { - pub fn new(interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self { - Pattern::new_(interner.db(), InternedWrapperNoDebug(kind)) + pub fn new(_interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, PatternKind<'static>>(kind) }; + Self { interned: Interned::new_gc(PatternInterned(kind)) } } pub fn inner(&self) -> &PatternKind<'db> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { std::mem::transmute::<&PatternKind<'static>, &PatternKind<'db>>(inner) } + } +} + +impl<'db> std::fmt::Debug for Pattern<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.kind().fmt(f) } } @@ -831,6 +873,35 @@ impl<'db> rustc_type_ir::inherent::IntoKind for Pattern<'db> { } } +impl<'db> rustc_type_ir::TypeVisitable> for Pattern<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + self.kind().visit_with(visitor) + } +} + +impl<'db, V: WorldExposer> rustc_type_ir::GenericTypeVisitable for Pattern<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned(self.interned); + self.kind().generic_visit_with(visitor); + } +} + +impl<'db> rustc_type_ir::TypeFoldable> for Pattern<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(Pattern::new(folder.cx(), self.kind().try_fold_with(folder)?)) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + Pattern::new(folder.cx(), self.kind().fold_with(folder)) + } +} + impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { fn relate>>( relation: &mut R, @@ -851,9 +922,9 @@ impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { if a.len() != b.len() { return Err(TypeError::Mismatch); } - let pats = CollectAndApply::collect_and_apply( + let pats = PatList::new_from_iter( + relation.cx(), std::iter::zip(a.iter(), b.iter()).map(|(a, b)| relation.relate(a, b)), - |g| PatList::new_from_iter(tcx, g.iter().cloned()), )?; Ok(Pattern::new(tcx, PatternKind::Or(pats))) } @@ -865,7 +936,8 @@ impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { } } -interned_vec_db!(PatList, Pattern); +interned_slice!(PatListStorage, PatList, Pattern<'db>, Pattern<'static>); +impl_foldable_for_interned_slice!(PatList); macro_rules! 
as_lang_item { ( @@ -941,7 +1013,7 @@ impl<'db> Interner for DbInterner<'db> { self, data: &[(OpaqueTypeKey<'db>, Self::Ty)], ) -> Self::PredefinedOpaques { - PredefinedOpaques::new_from_iter(self, data.iter().cloned()) + PredefinedOpaques::new_from_slice(data) } type CanonicalVarKinds = CanonicalVars<'db>; @@ -950,7 +1022,7 @@ impl<'db> Interner for DbInterner<'db> { self, kinds: &[rustc_type_ir::CanonicalVarKind], ) -> Self::CanonicalVarKinds { - CanonicalVars::new_from_iter(self, kinds.iter().cloned()) + CanonicalVars::new_from_slice(kinds) } type ExternalConstraints = ExternalConstraints<'db>; @@ -1012,7 +1084,7 @@ impl<'db> Interner for DbInterner<'db> { type Features = Features; fn mk_args(self, args: &[Self::GenericArg]) -> Self::GenericArgs { - GenericArgs::new_from_iter(self, args.iter().cloned()) + GenericArgs::new_from_slice(args) } fn mk_args_from_iter(self, args: I) -> T::Output @@ -1020,9 +1092,7 @@ impl<'db> Interner for DbInterner<'db> { I: Iterator, T: rustc_type_ir::CollectAndApply, { - CollectAndApply::collect_and_apply(args, |g| { - GenericArgs::new_from_iter(self, g.iter().cloned()) - }) + GenericArgs::new_from_iter(self, args) } type UnsizingParams = UnsizingParams; @@ -1096,7 +1166,7 @@ impl<'db> Interner for DbInterner<'db> { | SolverDefId::ImplId(_) | SolverDefId::InternedClosureId(_) | SolverDefId::InternedCoroutineId(_) => { - return VariancesOf::new_from_iter(self, []); + return VariancesOf::empty(self); } }; self.db.variances_of(generic_def) @@ -1202,7 +1272,7 @@ impl<'db> Interner for DbInterner<'db> { I: Iterator, T: rustc_type_ir::CollectAndApply, { - CollectAndApply::collect_and_apply(args, |g| Tys::new_from_iter(self, g.iter().cloned())) + Tys::new_from_iter(self, args) } fn parent(self, def_id: Self::DefId) -> Self::DefId { @@ -1338,7 +1408,7 @@ impl<'db> Interner for DbInterner<'db> { let own_bounds: FxHashSet<_> = self.item_self_bounds(def_id).skip_binder().into_iter().collect(); if all_bounds.len() == own_bounds.len() { - EarlyBinder::bind(Clauses::new_from_iter(self, [])) + EarlyBinder::bind(Clauses::empty(self)) } else { EarlyBinder::bind(Clauses::new_from_iter( self, @@ -1952,7 +2022,7 @@ impl<'db> Interner for DbInterner<'db> { let field_types = self.db().field_types(variant.id()); let mut unsizing_params = DenseBitSet::new_empty(num_params); - let ty = field_types[tail_field.0]; + let ty = field_types[tail_field.0].get(); for arg in ty.instantiate_identity().walk() { if let Some(i) = maybe_unsizing_param_idx(arg) { unsizing_params.insert(i); @@ -1962,7 +2032,7 @@ impl<'db> Interner for DbInterner<'db> { // Ensure none of the other fields mention the parameters used // in unsizing. 
for field in prefix_fields { - for arg in field_types[field.0].instantiate_identity().walk() { + for arg in field_types[field.0].get().instantiate_identity().walk() { if let Some(i) = maybe_unsizing_param_idx(arg) { unsizing_params.remove(i); } @@ -2022,7 +2092,7 @@ impl<'db> Interner for DbInterner<'db> { }; let mut result = Vec::new(); crate::opaques::opaque_types_defined_by(self.db, def_id, &mut result); - SolverDefIds::new_from_iter(self, result) + SolverDefIds::new_from_slice(&result) } fn opaque_types_and_coroutines_defined_by(self, def_id: Self::LocalDefId) -> Self::LocalDefIds { @@ -2051,7 +2121,7 @@ impl<'db> Interner for DbInterner<'db> { } }); - SolverDefIds::new_from_iter(self, result) + SolverDefIds::new_from_slice(&result) } fn alias_has_const_conditions(self, _def_id: Self::DefId) -> bool { @@ -2096,10 +2166,10 @@ impl<'db> Interner for DbInterner<'db> { let impl_trait_id = self.db().lookup_intern_impl_trait_id(opaque); match impl_trait_id { crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { - crate::opaques::rpit_hidden_types(self.db, func)[idx] + crate::opaques::rpit_hidden_types(self.db, func)[idx].get() } crate::ImplTraitId::TypeAliasImplTrait(type_alias, idx) => { - crate::opaques::tait_hidden_types(self.db, type_alias)[idx] + crate::opaques::tait_hidden_types(self.db, type_alias)[idx].get() } } } @@ -2288,6 +2358,11 @@ macro_rules! TrivialTypeTraversalImpls { ::output() } } + + impl rustc_type_ir::GenericTypeVisitable for $ty { + #[inline] + fn generic_visit_with(&self, _visitor: &mut V) {} + } )+ }; } @@ -2302,17 +2377,22 @@ TrivialTypeTraversalImpls! { AdtIdWrapper, ImplIdWrapper, GeneralConstIdWrapper, - Pattern<'db>, Safety, FnAbi, Span, ParamConst, ParamTy, BoundRegion, - BoundVar, Placeholder, Placeholder, Placeholder, + Placeholder, + BoundVarKind, + EarlyParamRegion, + LateParamRegion, + AdtDef, + BoundTy, + BoundConst, } mod tls_db { diff --git a/crates/hir-ty/src/next_solver/ir_print.rs b/crates/hir-ty/src/next_solver/ir_print.rs index dab0fe9e4a90..998aab5a3fff 100644 --- a/crates/hir-ty/src/next_solver/ir_print.rs +++ b/crates/hir-ty/src/next_solver/ir_print.rs @@ -2,7 +2,6 @@ use std::any::type_name_of_val; -use rustc_type_ir::inherent::SliceLike; use rustc_type_ir::{self as ty, ir_print::IrPrint}; use super::SolverDefId; diff --git a/crates/hir-ty/src/next_solver/opaques.rs b/crates/hir-ty/src/next_solver/opaques.rs index 4fbeb76adaad..7e1d6b7328cd 100644 --- a/crates/hir-ty/src/next_solver/opaques.rs +++ b/crates/hir-ty/src/next_solver/opaques.rs @@ -1,37 +1,64 @@ //! Things related to opaques in the next-trait-solver. 
+use intern::{Interned, InternedRef, impl_internable}; use rustc_ast_ir::try_visit; use rustc_type_ir::inherent::SliceLike; -use super::{DbInterner, SolverDefId, Ty, interned_vec_db, interned_vec_nolifetime_salsa}; +use crate::next_solver::{impl_foldable_for_interned_slice, interned_slice}; + +use super::{DbInterner, SolverDefId, Ty}; pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey>; type PredefinedOpaque<'db> = (OpaqueTypeKey<'db>, Ty<'db>); -interned_vec_db!(PredefinedOpaques, PredefinedOpaque); +interned_slice!( + PredefinedOpaquesStorage, + PredefinedOpaques, + PredefinedOpaque<'db>, + PredefinedOpaque<'static>, +); +impl_foldable_for_interned_slice!(PredefinedOpaques); pub type ExternalConstraintsData<'db> = rustc_type_ir::solve::ExternalConstraintsData>; -interned_vec_nolifetime_salsa!(SolverDefIds, SolverDefId); +interned_slice!(SolverDefIdsStorage, SolverDefIds, SolverDefId, SolverDefId); +impl_foldable_for_interned_slice!(SolverDefIds); -#[salsa::interned(constructor = new_, debug, unsafe(non_update_types))] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct ExternalConstraints<'db> { - #[returns(ref)] - kind_: rustc_type_ir::solve::ExternalConstraintsData>, + interned: InternedRef<'db, ExternalConstraintsInterned>, } +#[derive(PartialEq, Eq, Hash)] +struct ExternalConstraintsInterned(ExternalConstraintsData<'static>); + +impl_internable!(gc; ExternalConstraintsInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + impl<'db> ExternalConstraints<'db> { - pub fn new(interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self { - ExternalConstraints::new_(interner.db(), data) + #[inline] + pub fn new(_interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self { + let data = unsafe { + std::mem::transmute::, ExternalConstraintsData<'static>>( + data, + ) + }; + Self { interned: Interned::new_gc(ExternalConstraintsInterned(data)) } } + #[inline] pub fn inner(&self) -> &ExternalConstraintsData<'db> { - crate::with_attached_db(|db| { - let inner = self.kind_(db); - // SAFETY: ¯\_(ツ)_/¯ - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::<&ExternalConstraintsData<'static>, &ExternalConstraintsData<'db>>( + inner, + ) + } } } @@ -43,6 +70,12 @@ impl<'db> std::ops::Deref for ExternalConstraints<'db> { } } +impl std::fmt::Debug for ExternalConstraints<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.inner().fmt(f) + } +} + impl<'db> rustc_type_ir::TypeVisitable> for ExternalConstraints<'db> { fn visit_with>>( &self, diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs index 71d9b0b55a30..95b48c73c12f 100644 --- a/crates/hir-ty/src/next_solver/predicate.rs +++ b/crates/hir-ty/src/next_solver/predicate.rs @@ -2,20 +2,25 @@ use std::cmp::Ordering; -use macros::{TypeFoldable, TypeVisitable}; +use intern::{ + Interned, InternedRef, InternedSlice, InternedSliceRef, impl_internable, impl_slice_internable, +}; +use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable}; use rustc_type_ir::{ - self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags, - PredicatePolarity, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, - TypeVisitable, Upcast, UpcastFrom, WithCachedTypeInfo, + self as ty, CollectAndApply, EarlyBinder, FlagComputation, Flags, GenericTypeVisitable, + PredicatePolarity, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, Upcast, + 
UpcastFrom, WithCachedTypeInfo, elaborate::Elaboratable, error::{ExpectedFound, TypeError}, inherent::{IntoKind, SliceLike}, }; -use smallvec::SmallVec; -use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper}; +use crate::next_solver::{ + GenericArg, TraitIdWrapper, impl_foldable_for_interned_slice, impl_stored_interned_slice, + interned_slice, +}; -use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db}; +use super::{Binder, BoundVarKinds, DbInterner, Region, Ty}; pub type BoundExistentialPredicate<'db> = Binder<'db, ExistentialPredicate<'db>>; @@ -68,7 +73,13 @@ fn stable_cmp_existential_predicate<'db>( (ExistentialPredicate::AutoTrait(_), _) => Ordering::Greater, } } -interned_vec_db!(BoundExistentialPredicates, BoundExistentialPredicate); +interned_slice!( + BoundExistentialPredicatesStorage, + BoundExistentialPredicates, + BoundExistentialPredicate<'db>, + BoundExistentialPredicate<'static>, +); +impl_foldable_for_interned_slice!(BoundExistentialPredicates); impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates> for BoundExistentialPredicates<'db> @@ -82,7 +93,7 @@ impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates> ) -> Option< rustc_type_ir::Binder, rustc_type_ir::ExistentialTraitRef>>, > { - self.inner()[0] + self[0] .map_bound(|this| match this { ExistentialPredicate::Trait(tr) => Some(tr), _ => None, @@ -166,74 +177,50 @@ impl<'db> rustc_type_ir::relate::Relate> for BoundExistentialPre }, ); - CollectAndApply::collect_and_apply(v, |g| { - BoundExistentialPredicates::new_from_iter(interner, g.iter().cloned()) - }) + BoundExistentialPredicates::new_from_iter(interner, v) } } -#[salsa::interned(constructor = new_, unsafe(non_update_types))] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Predicate<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>>>, + interned: InternedRef<'db, PredicateInterned>, } -impl<'db> std::fmt::Debug for Predicate<'db> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.inner().internee.fmt(f) - } -} +#[derive(PartialEq, Eq, Hash)] +struct PredicateInterned(WithCachedTypeInfo>>); -impl<'db> std::fmt::Debug - for InternedWrapperNoDebug>>> -{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "Binder<")?; - match self.0.internee.skip_binder() { - rustc_type_ir::PredicateKind::Clause(clause_kind) => { - write!(f, "{clause_kind:?}") - } - rustc_type_ir::PredicateKind::DynCompatible(trait_def_id) => { - write!(f, "the trait `{trait_def_id:?}` is dyn-compatible") - } - rustc_type_ir::PredicateKind::Subtype(subtype_predicate) => { - write!(f, "{subtype_predicate:?}") - } - rustc_type_ir::PredicateKind::Coerce(coerce_predicate) => { - write!(f, "{coerce_predicate:?}") - } - rustc_type_ir::PredicateKind::ConstEquate(c1, c2) => { - write!(f, "the constant `{c1:?}` equals `{c2:?}`") - } - rustc_type_ir::PredicateKind::Ambiguous => write!(f, "ambiguous"), - rustc_type_ir::PredicateKind::NormalizesTo(data) => write!(f, "{data:?}"), - rustc_type_ir::PredicateKind::AliasRelate(t1, t2, dir) => { - write!(f, "{t1:?} {dir:?} {t2:?}") - } - }?; - write!(f, ", [{:?}]>", self.0.internee.bound_vars())?; - Ok(()) - } -} +impl_internable!(gc; PredicateInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; impl<'db> Predicate<'db> { - pub fn new(interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self { + pub fn new(_interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self { + let kind = unsafe 
{ + std::mem::transmute::< + Binder<'db, PredicateKind<'db>>, + Binder<'static, PredicateKind<'static>>, + >(kind) + }; let flags = FlagComputation::for_predicate(kind); let cached = WithCachedTypeInfo { internee: kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; - Predicate::new_(interner.db(), InternedWrapperNoDebug(cached)) + Self { interned: Interned::new_gc(PredicateInterned(cached)) } } pub fn inner(&self) -> &WithCachedTypeInfo>> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::< + &WithCachedTypeInfo>>, + &WithCachedTypeInfo>>, + >(inner) + } } /// Flips the polarity of a Predicate. @@ -259,110 +246,132 @@ impl<'db> Predicate<'db> { } } -// FIXME: should make a "header" in interned_vec - -#[derive(Debug, Clone)] -pub struct InternedClausesWrapper<'db>(SmallVec<[Clause<'db>; 2]>, TypeFlags, DebruijnIndex); - -impl<'db> PartialEq for InternedClausesWrapper<'db> { - fn eq(&self, other: &Self) -> bool { - self.0.eq(&other.0) +impl<'db> std::fmt::Debug for Predicate<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.kind().fmt(f) } } -impl<'db> Eq for InternedClausesWrapper<'db> {} +impl_slice_internable!(gc; ClausesStorage, WithCachedTypeInfo<()>, Clause<'static>); +impl_stored_interned_slice!(ClausesStorage, Clauses, StoredClauses); -impl<'db> std::hash::Hash for InternedClausesWrapper<'db> { - fn hash(&self, state: &mut H) { - self.0.hash(state) - } +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct Clauses<'db> { + interned: InternedSliceRef<'db, ClausesStorage>, } -#[salsa::interned(constructor = new_, unsafe(non_update_types))] -pub struct Clauses<'db> { - #[returns(ref)] - inner_: InternedClausesWrapper<'db>, +impl<'db> std::fmt::Debug for Clauses<'db> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_slice().fmt(fmt) + } } impl<'db> Clauses<'db> { - pub fn new_from_iter( - interner: DbInterner<'db>, - data: impl IntoIterator>, - ) -> Self { - let clauses: SmallVec<_> = data.into_iter().collect(); - let flags = FlagComputation::>::for_clauses(&clauses); - let wrapper = InternedClausesWrapper(clauses, flags.flags, flags.outer_exclusive_binder); - Clauses::new_(interner.db(), wrapper) - } - - pub fn inner(&self) -> &InternedClausesWrapper<'db> { - crate::with_attached_db(|db| { - let inner = self.inner_(db); - // SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + #[inline] + pub fn empty(_interner: DbInterner<'db>) -> Self { + // FIXME: Get from a static. 
+ Self::new_from_slice(&[]) + } + + #[inline] + pub fn new_from_slice(slice: &[Clause<'db>]) -> Self { + let slice = unsafe { ::std::mem::transmute::<&[Clause<'db>], &[Clause<'static>]>(slice) }; + let flags = FlagComputation::>::for_clauses(slice); + let flags = WithCachedTypeInfo { + internee: (), + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + }; + Self { interned: InternedSlice::from_header_and_slice(flags, slice) } + } + + #[inline] + pub fn new_from_iter(_interner: DbInterner<'db>, args: I) -> T::Output + where + I: IntoIterator, + T: CollectAndApply, Self>, + { + CollectAndApply::collect_and_apply(args.into_iter(), Self::new_from_slice) + } + + #[inline] + pub fn as_slice(self) -> &'db [Clause<'db>] { + let slice = &self.interned.get().slice; + unsafe { ::std::mem::transmute::<&[Clause<'static>], &[Clause<'db>]>(slice) } + } + + #[inline] + pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>> { + self.as_slice().iter().copied() + } + + #[inline] + pub fn len(self) -> usize { + self.as_slice().len() + } + + #[inline] + pub fn is_empty(self) -> bool { + self.as_slice().is_empty() } } -impl<'db> std::fmt::Debug for Clauses<'db> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.inner().0.fmt(f) +impl<'db> IntoIterator for Clauses<'db> { + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>; + type Item = Clause<'db>; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.iter() } } -impl<'db> rustc_type_ir::inherent::Clauses> for Clauses<'db> {} +impl<'db> std::ops::Deref for Clauses<'db> { + type Target = [Clause<'db>]; + + #[inline] + fn deref(&self) -> &Self::Target { + (*self).as_slice() + } +} impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> { type Item = Clause<'db>; - type IntoIter = ; 2]> as IntoIterator>::IntoIter; + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>; + #[inline] fn iter(self) -> Self::IntoIter { - self.inner().0.clone().into_iter() + self.iter() } + #[inline] fn as_slice(&self) -> &[Self::Item] { - self.inner().0.as_slice() - } -} - -impl<'db> IntoIterator for Clauses<'db> { - type Item = Clause<'db>; - type IntoIter = ::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - rustc_type_ir::inherent::SliceLike::iter(self) + (*self).as_slice() } } impl<'db> Default for Clauses<'db> { + #[inline] fn default() -> Self { - Clauses::new_from_iter(DbInterner::conjure(), []) + Clauses::empty(DbInterner::conjure()) } } +impl<'db> rustc_type_ir::inherent::Clauses> for Clauses<'db> {} + impl<'db> rustc_type_ir::TypeSuperFoldable> for Clauses<'db> { fn try_super_fold_with>>( self, folder: &mut F, ) -> Result { - let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len()); - for c in self { - clauses.push(c.try_fold_with(folder)?); - } - Ok(Clauses::new_from_iter(folder.cx(), clauses)) + Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.try_fold_with(folder))) } fn super_fold_with>>( self, folder: &mut F, ) -> Self { - let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len()); - for c in self { - clauses.push(c.fold_with(folder)); - } - Clauses::new_from_iter(folder.cx(), clauses) + Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.fold_with(folder))) } } @@ -371,15 +380,10 @@ impl<'db> rustc_type_ir::TypeFoldable> for Clauses<'db> { self, folder: &mut F, ) -> Result { - use rustc_type_ir::inherent::SliceLike as _; - let inner: 
smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; - Ok(Clauses::new_from_iter(folder.cx(), inner)) + self.try_super_fold_with(folder) } fn fold_with>>(self, folder: &mut F) -> Self { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = self.iter().map(|v| v.fold_with(folder)).collect(); - Clauses::new_from_iter(folder.cx(), inner) + self.super_fold_with(folder) } } @@ -389,19 +393,27 @@ impl<'db> rustc_type_ir::TypeVisitable> for Clauses<'db> { visitor: &mut V, ) -> V::Result { use rustc_ast_ir::visit::VisitorResult; - use rustc_type_ir::inherent::SliceLike as _; - rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); + rustc_ast_ir::walk_visitable_list!(visitor, self.iter()); V::Result::output() } } +impl<'db, V: super::WorldExposer> rustc_type_ir::GenericTypeVisitable for Clauses<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned_slice(self.interned); + self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + } +} + impl<'db> rustc_type_ir::Flags for Clauses<'db> { + #[inline] fn flags(&self) -> rustc_type_ir::TypeFlags { - self.inner().1 + self.interned.header.header.flags } + #[inline] fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { - self.inner().2 + self.interned.header.header.outer_exclusive_binder } } @@ -414,7 +426,7 @@ impl<'db> rustc_type_ir::TypeSuperVisitable> for Clauses<'db> { } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] // TODO implement Debug by hand +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, GenericTypeVisitable)] // TODO implement Debug by hand pub struct Clause<'db>(pub(crate) Predicate<'db>); // We could cram the reveal into the clauses like rustc does, probably @@ -425,7 +437,7 @@ pub struct ParamEnv<'db> { impl<'db> ParamEnv<'db> { pub fn empty() -> Self { - ParamEnv { clauses: Clauses::new_from_iter(DbInterner::conjure(), []) } + ParamEnv { clauses: Clauses::empty(DbInterner::conjure()) } } pub fn clauses(self) -> Clauses<'db> { @@ -460,6 +472,13 @@ impl<'db> TypeVisitable> for Predicate<'db> { } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Predicate<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned(self.interned); + self.kind().generic_visit_with(visitor); + } +} + impl<'db> TypeSuperVisitable> for Predicate<'db> { fn super_visit_with>>( &self, diff --git a/crates/hir-ty/src/next_solver/region.rs b/crates/hir-ty/src/next_solver/region.rs index 3be715ab07d3..dee7953ae36c 100644 --- a/crates/hir-ty/src/next_solver/region.rs +++ b/crates/hir-ty/src/next_solver/region.rs @@ -1,47 +1,50 @@ //! Things related to regions. 
use hir_def::LifetimeParamId; -use intern::Symbol; +use intern::{Interned, InternedRef, Symbol, impl_internable}; use rustc_type_ir::{ - BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags, - TypeFoldable, TypeVisitable, + BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, GenericTypeVisitable, INNERMOST, RegionVid, + TypeFlags, TypeFoldable, TypeVisitable, inherent::{IntoKind, PlaceholderLike, SliceLike}, relate::Relate, }; -use crate::next_solver::{GenericArg, OutlivesPredicate}; +use crate::next_solver::{ + GenericArg, OutlivesPredicate, impl_foldable_for_interned_slice, interned_slice, +}; use super::{ - ErrorGuaranteed, SolverDefId, interned_vec_db, + ErrorGuaranteed, SolverDefId, interner::{BoundVarKind, DbInterner, Placeholder}, }; pub type RegionKind<'db> = rustc_type_ir::RegionKind>; -#[salsa::interned(constructor = new_, unsafe(non_update_types))] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Region<'db> { - #[returns(ref)] - kind_: RegionKind<'db>, + pub(super) interned: InternedRef<'db, RegionInterned>, } -impl std::fmt::Debug for Region<'_> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.kind().fmt(f) - } -} +#[derive(PartialEq, Eq, Hash)] +#[repr(align(4))] // Required for `GenericArg` bit-tagging. +pub(super) struct RegionInterned(RegionKind<'static>); + +impl_internable!(gc; RegionInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; impl<'db> Region<'db> { - pub fn new(interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self { - Region::new_(interner.db(), kind) + pub fn new(_interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, RegionKind<'static>>(kind) }; + Self { interned: Interned::new_gc(RegionInterned(kind)) } } pub fn inner(&self) -> &RegionKind<'db> { - crate::with_attached_db(|db| { - let inner = self.kind_(db); - // SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. 
- unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) } - }) + let inner = &self.interned.0; + unsafe { std::mem::transmute::<&RegionKind<'static>, &RegionKind<'db>>(inner) } } pub fn new_early_param( @@ -256,6 +259,12 @@ impl BoundRegionKind { } } +impl std::fmt::Debug for Region<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.kind().fmt(f) + } +} + impl<'db> IntoKind for Region<'db> { type Kind = RegionKind<'db>; @@ -377,6 +386,19 @@ impl<'db> PlaceholderLike> for PlaceholderRegion { } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Region<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned(self.interned); + self.kind().generic_visit_with(visitor); + } +} + type GenericArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>; -interned_vec_db!(RegionAssumptions, GenericArgOutlivesPredicate); +interned_slice!( + RegionAssumptionsStorage, + RegionAssumptions, + GenericArgOutlivesPredicate<'db>, + GenericArgOutlivesPredicate<'static>, +); +impl_foldable_for_interned_slice!(RegionAssumptions); diff --git a/crates/hir-ty/src/next_solver/solver.rs b/crates/hir-ty/src/next_solver/solver.rs index 40a3f17cf169..d800925ba4e9 100644 --- a/crates/hir-ty/src/next_solver/solver.rs +++ b/crates/hir-ty/src/next_solver/solver.rs @@ -5,7 +5,7 @@ use rustc_next_trait_solver::delegate::SolverDelegate; use rustc_type_ir::{ AliasTyKind, GenericArgKind, InferCtxtLike, Interner, PredicatePolarity, TypeFlags, TypeVisitableExt, - inherent::{IntoKind, SliceLike, Term as _, Ty as _}, + inherent::{IntoKind, Term as _, Ty as _}, lang_items::SolverTraitLangItem, solve::{Certainty, NoSolution}, }; diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs index 7dcb8567c2e4..305da50b6d9f 100644 --- a/crates/hir-ty/src/next_solver/ty.rs +++ b/crates/hir-ty/src/next_solver/ty.rs @@ -7,13 +7,14 @@ use hir_def::{ hir::generics::{TypeOrConstParamData, TypeParamProvenance}, }; use hir_def::{TraitId, type_ref::Rawness}; +use intern::{Interned, InternedRef, impl_internable}; use rustc_abi::{Float, Integer, Size}; use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult}; use rustc_type_ir::{ AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, CoroutineArgs, CoroutineArgsParts, - DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner, - TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, - TypeVisitor, UintTy, Upcast, WithCachedTypeInfo, + DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, GenericTypeVisitable, InferTy, IntTy, + IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo, inherent::{ AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _, IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _, @@ -28,15 +29,16 @@ use crate::{ lower::GenericPredicates, next_solver::{ AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const, - CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper, + CoroutineIdWrapper, FnSig, GenericArgKind, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper, abi::Safety, - interner::InternedWrapperNoDebug, + impl_foldable_for_interned_slice, impl_stored_interned, impl_stored_interned_slice, + interned_slice, util::{CoroutineArgsExt, IntegerTypeExt}, }, }; use super::{ 
- BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, interned_vec_db, + BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, util::{FloatExt, IntegerExt}, }; @@ -44,35 +46,45 @@ pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType = rustc_type_ir::TyKind>; pub type FnHeader<'db> = rustc_type_ir::FnHeader>; -#[salsa::interned(constructor = new_, unsafe(non_update_types))] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Ty<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>>, + pub(super) interned: InternedRef<'db, TyInterned>, } +#[derive(PartialEq, Eq, Hash)] +#[repr(align(4))] // Required for `GenericArg` bit-tagging. +pub(super) struct TyInterned(WithCachedTypeInfo>); + +impl_internable!(gc; TyInterned); +impl_stored_interned!(TyInterned, Ty, StoredTy); + const _: () = { const fn is_copy() {} is_copy::>(); }; impl<'db> Ty<'db> { - pub fn new(interner: DbInterner<'db>, kind: TyKind<'db>) -> Self { + #[inline] + pub fn new(_interner: DbInterner<'db>, kind: TyKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, TyKind<'static>>(kind) }; let flags = FlagComputation::for_kind(&kind); let cached = WithCachedTypeInfo { internee: kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; - Ty::new_(interner.db(), InternedWrapperNoDebug(cached)) + Self { interned: Interned::new_gc(TyInterned(cached)) } } + #[inline] pub fn inner(&self) -> &WithCachedTypeInfo> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::< + &WithCachedTypeInfo>, + &WithCachedTypeInfo>, + >(inner) + } } pub fn new_adt(interner: DbInterner<'db>, adt_id: AdtId, args: GenericArgs<'db>) -> Self { @@ -383,7 +395,7 @@ impl<'db> Ty<'db> { #[inline] pub fn is_unit(self) -> bool { - matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty()) + matches!(self.kind(), TyKind::Tuple(tys) if tys.is_empty()) } #[inline] @@ -661,12 +673,9 @@ impl<'db> Ty<'db> { // This is only used by type walking. // Parameters will be walked outside, and projection predicate is not used. // So just provide the Future trait. - let impl_bound = TraitRef::new( - interner, - future_trait.into(), - GenericArgs::new_from_iter(interner, []), - ) - .upcast(interner); + let impl_bound = + TraitRef::new(interner, future_trait.into(), GenericArgs::empty(interner)) + .upcast(interner); Some(vec![impl_bound]) } else { None @@ -730,20 +739,22 @@ impl<'db> std::fmt::Debug for Ty<'db> { } } -impl<'db> std::fmt::Debug for InternedWrapperNoDebug>> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.internee.fmt(f) - } -} - impl<'db> IntoKind for Ty<'db> { type Kind = TyKind<'db>; + #[inline] fn kind(self) -> Self::Kind { self.inner().internee } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Ty<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + visitor.on_interned(self.interned); + self.kind().generic_visit_with(visitor); + } +} + impl<'db> TypeVisitable> for Ty<'db> { fn visit_with>>( &self, @@ -1068,9 +1079,9 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { // to unnecessary overflows in async code. See the issue: // . 
let coroutine_args = interner.mk_args_from_iter(coroutine_args.iter().map(|arg| { - match arg { - GenericArg::Ty(_) | GenericArg::Const(_) => arg, - GenericArg::Lifetime(_) => { + match arg.kind() { + GenericArgKind::Type(_) | GenericArgKind::Const(_) => arg, + GenericArgKind::Lifetime(_) => { crate::next_solver::Region::new(interner, rustc_type_ir::RegionKind::ReErased) .into() } @@ -1254,10 +1265,13 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { } } -interned_vec_db!(Tys, Ty); +interned_slice!(TysStorage, Tys, Ty<'db>, Ty<'static>); +impl_foldable_for_interned_slice!(Tys); +impl_stored_interned_slice!(TysStorage, Tys, StoredTys); impl<'db> Tys<'db> { - pub fn inputs(&self) -> &[Ty<'db>] { + #[inline] + pub fn inputs(self) -> &'db [Ty<'db>] { self.as_slice().split_last().unwrap().1 } } @@ -1323,6 +1337,10 @@ pub enum BoundTyKind { #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct ErrorGuaranteed; +impl GenericTypeVisitable for ErrorGuaranteed { + fn generic_visit_with(&self, _visitor: &mut V) {} +} + impl<'db> TypeVisitable> for ErrorGuaranteed { fn visit_with>>( &self, diff --git a/crates/hir-ty/src/opaques.rs b/crates/hir-ty/src/opaques.rs index 76a6207e2338..27ae5e39d55b 100644 --- a/crates/hir-ty/src/opaques.rs +++ b/crates/hir-ty/src/opaques.rs @@ -13,7 +13,7 @@ use crate::{ db::{HirDatabase, InternedOpaqueTyId}, lower::{ImplTraitIdx, ImplTraits}, next_solver::{ - DbInterner, EarlyBinder, ErrorGuaranteed, SolverDefId, Ty, TypingMode, + DbInterner, ErrorGuaranteed, SolverDefId, StoredEarlyBinder, StoredTy, Ty, TypingMode, infer::{DbInternerInferExt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -72,10 +72,10 @@ pub(crate) fn opaque_types_defined_by( // FIXME: Collect opaques from `#[define_opaque]`. - fn extend_with_opaques<'db>( - db: &'db dyn HirDatabase, - opaques: &Option>>>, - mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>, + fn extend_with_opaques( + db: &dyn HirDatabase, + opaques: &Option>>, + mut make_impl_trait: impl FnMut(ImplTraitIdx) -> ImplTraitId, result: &mut Vec, ) { if let Some(opaques) = opaques { @@ -89,25 +89,25 @@ pub(crate) fn opaque_types_defined_by( // These are firewall queries to prevent drawing dependencies between infers: -#[salsa::tracked(returns(ref), unsafe(non_update_types))] +#[salsa::tracked(returns(ref))] pub(crate) fn rpit_hidden_types<'db>( db: &'db dyn HirDatabase, function: FunctionId, -) -> ArenaMap, EarlyBinder<'db, Ty<'db>>> { +) -> ArenaMap> { let infer = InferenceResult::for_body(db, function.into()); let mut result = ArenaMap::new(); for (opaque, hidden_type) in infer.return_position_impl_trait_types(db) { - result.insert(opaque, EarlyBinder::bind(hidden_type)); + result.insert(opaque, StoredEarlyBinder::bind(hidden_type.store())); } result.shrink_to_fit(); result } -#[salsa::tracked(returns(ref), unsafe(non_update_types))] +#[salsa::tracked(returns(ref))] pub(crate) fn tait_hidden_types<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, -) -> ArenaMap, EarlyBinder<'db, Ty<'db>>> { +) -> ArenaMap> { // Call this first, to not perform redundant work if there are no TAITs. 
let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias) .as_deref() @@ -129,7 +129,7 @@ pub(crate) fn tait_hidden_types<'db>( let mut result = ArenaMap::with_capacity(taits_count); for defining_body in defining_bodies { let infer = InferenceResult::for_body(db, defining_body); - for (&opaque, &hidden_type) in &infer.type_of_opaque { + for (&opaque, hidden_type) in &infer.type_of_opaque { let ImplTraitId::TypeAliasImplTrait(opaque_owner, opaque_idx) = opaque.loc(db) else { continue; }; @@ -138,13 +138,18 @@ pub(crate) fn tait_hidden_types<'db>( } // In the presence of errors, we attempt to create a unified type from all // types. rustc doesn't do that, but this should improve the experience. - let hidden_type = infcx.insert_type_vars(hidden_type); + let hidden_type = infcx.insert_type_vars(hidden_type.as_ref()); match result.entry(opaque_idx) { la_arena::Entry::Vacant(entry) => { - entry.insert(EarlyBinder::bind(hidden_type)); + entry.insert(StoredEarlyBinder::bind(hidden_type.store())); } la_arena::Entry::Occupied(entry) => { - _ = ocx.eq(&cause, param_env, entry.get().instantiate_identity(), hidden_type); + _ = ocx.eq( + &cause, + param_env, + entry.get().get().instantiate_identity(), + hidden_type, + ); } } } @@ -157,12 +162,15 @@ pub(crate) fn tait_hidden_types<'db>( let idx = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx as u32)); match result.entry(idx) { la_arena::Entry::Vacant(entry) => { - entry.insert(EarlyBinder::bind(Ty::new_error(interner, ErrorGuaranteed))); + entry.insert(StoredEarlyBinder::bind( + Ty::new_error(interner, ErrorGuaranteed).store(), + )); } la_arena::Entry::Occupied(mut entry) => { - *entry.get_mut() = entry.get().map_bound(|hidden_type| { - infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner) - }); + let hidden_type = entry.get().get().skip_binder(); + let hidden_type = + infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner); + *entry.get_mut() = StoredEarlyBinder::bind(hidden_type.store()); } } } diff --git a/crates/hir-ty/src/specialization.rs b/crates/hir-ty/src/specialization.rs index 4164e39e6592..d97a35549ca4 100644 --- a/crates/hir-ty/src/specialization.rs +++ b/crates/hir-ty/src/specialization.rs @@ -2,7 +2,6 @@ use hir_def::{HasModule, ImplId, nameres::crate_def_map}; use intern::sym; -use rustc_type_ir::inherent::SliceLike; use tracing::debug; use crate::{ diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index a31353f1e336..67ab89f5ec8f 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -149,9 +149,10 @@ fn check_impl( let (body, body_source_map) = db.body_with_source_map(def); let inference_result = InferenceResult::for_body(&db, def); - for (pat, mut ty) in inference_result.type_of_pat.iter() { + for (pat, ty) in inference_result.type_of_pat.iter() { + let mut ty = ty.as_ref(); if let Pat::Bind { id, .. 
} = body[pat] { - ty = &inference_result.type_of_binding[id]; + ty = inference_result.type_of_binding[id].as_ref(); } let node = match pat_node(&body_source_map, pat, &db) { Some(value) => value, @@ -169,6 +170,7 @@ fn check_impl( } for (expr, ty) in inference_result.type_of_expr.iter() { + let ty = ty.as_ref(); let node = match expr_node(&body_source_map, expr, &db) { Some(value) => value, None => continue, @@ -209,8 +211,8 @@ fn check_impl( let range = node.as_ref().original_file_range_rooted(&db); let actual = format!( "expected {}, got {}", - mismatch.expected.display_test(&db, display_target), - mismatch.actual.display_test(&db, display_target) + mismatch.expected.as_ref().display_test(&db, display_target), + mismatch.actual.as_ref().display_test(&db, display_target) ); match mismatches.remove(&range) { Some(annotation) => assert_eq!(actual, annotation), @@ -318,20 +320,20 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { crate::attach_db(&db, || { let mut buf = String::new(); - let mut infer_def = |inference_result: &InferenceResult<'_>, + let mut infer_def = |inference_result: &InferenceResult, body: Arc, body_source_map: Arc, krate: Crate| { let display_target = DisplayTarget::from_crate(&db, krate); - let mut types: Vec<(InFile, &Ty<'_>)> = Vec::new(); - let mut mismatches: Vec<(InFile, &TypeMismatch<'_>)> = Vec::new(); + let mut types: Vec<(InFile, Ty<'_>)> = Vec::new(); + let mut mismatches: Vec<(InFile, &TypeMismatch)> = Vec::new(); if let Some(self_param) = body.self_param { let ty = &inference_result.type_of_binding[self_param]; if let Some(syntax_ptr) = body_source_map.self_param_syntax() { let root = db.parse_or_expand(syntax_ptr.file_id); let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone()); - types.push((node, ty)); + types.push((node, ty.as_ref())); } } @@ -346,7 +348,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } Err(SyntheticSyntax) => continue, }; - types.push((node.clone(), ty)); + types.push((node.clone(), ty.as_ref())); if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) { mismatches.push((node, mismatch)); } @@ -360,7 +362,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } Err(SyntheticSyntax) => continue, }; - types.push((node.clone(), ty)); + types.push((node.clone(), ty.as_ref())); if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) { mismatches.push((node, mismatch)); } @@ -401,8 +403,8 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { "{}{:?}: expected {}, got {}\n", macro_prefix, range, - mismatch.expected.display_test(&db, display_target), - mismatch.actual.display_test(&db, display_target), + mismatch.expected.as_ref().display_test(&db, display_target), + mismatch.actual.as_ref().display_test(&db, display_target), ); } } diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs index ff55ff54cefb..3bdc72d01500 100644 --- a/crates/hir-ty/src/tests/closure_captures.rs +++ b/crates/hir-ty/src/tests/closure_captures.rs @@ -74,6 +74,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec let place = capture.display_place(closure.0, db); let capture_ty = capture .ty + .get() .skip_binder() .display_test(db, DisplayTarget::from_crate(db, module.krate(db))) .to_string(); diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs index 7b0c1d35442e..6558d2179fba 100644 --- 
a/crates/hir-ty/src/tests/incremental.rs +++ b/crates/hir-ty/src/tests/incremental.rs @@ -28,7 +28,7 @@ fn foo() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 1)], + &[("InferenceResult::for_body_", 1)], expect_test::expect![[r#" [ "crate_local_def_map", @@ -36,17 +36,17 @@ fn foo() -> i32 { "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "AttrFlags::query_", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", + "trait_environment_query", "lang_items", "crate_lang_items", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", ] "#]], @@ -72,7 +72,7 @@ fn foo() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 0)], + &[("InferenceResult::for_body_", 0)], expect_test::expect![[r#" [ "parse_shim", @@ -115,7 +115,7 @@ fn baz() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 3)], + &[("InferenceResult::for_body_", 3)], expect_test::expect![[r#" [ "crate_local_def_map", @@ -123,37 +123,37 @@ fn baz() -> i32 { "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "AttrFlags::query_", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", + "trait_environment_query", "lang_items", "crate_lang_items", "AttrFlags::query_", "AttrFlags::query_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "trait_environment_query", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "trait_environment_query", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", ] "#]], @@ -184,7 +184,7 @@ fn baz() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 1)], + &[("InferenceResult::for_body_", 1)], expect_test::expect![[r#" [ "parse_shim", @@ -202,7 +202,7 @@ fn baz() -> i32 { "function_signature_shim", "body_with_source_map_shim", "body_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "expr_scopes_shim", "function_signature_with_source_map_shim", "function_signature_shim", @@ -502,7 +502,7 @@ impl SomeStruct { "crate_local_def_map", "TraitImpls::for_crate_", "AttrFlags::query_", - "impl_trait_with_diagnostics_shim", + "impl_trait_with_diagnostics_query", "impl_signature_shim", "impl_signature_with_source_map_shim", "lang_items", @@ -512,7 +512,7 @@ impl 
SomeStruct { "AttrFlags::query_", "AttrFlags::query_", "AttrFlags::query_", - "impl_self_ty_with_diagnostics_shim", + "impl_self_ty_with_diagnostics_query", "struct_signature_shim", "struct_signature_with_source_map_shim", ] @@ -574,7 +574,7 @@ fn main() { "body_with_source_map_shim", "AttrFlags::query_", "ImplItems::of_", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "trait_signature_shim", "trait_signature_with_source_map_shim", "AttrFlags::query_", @@ -583,36 +583,36 @@ fn main() { "AttrFlags::query_", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", + "trait_environment_query", "lang_items", "crate_lang_items", "AttrFlags::query_", "AttrFlags::query_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", - "InferenceResult < 'db >::for_body_", + "GenericPredicates::query_with_diagnostics_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", - "trait_environment_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "trait_environment_query", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", "struct_signature_shim", "struct_signature_with_source_map_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "value_ty_shim", + "GenericPredicates::query_with_diagnostics_", + "value_ty_query", "VariantFields::firewall_", "VariantFields::query_", "InherentImpls::for_crate_", "impl_signature_shim", "impl_signature_with_source_map_shim", - "callable_item_signature_shim", + "callable_item_signature_query", "TraitImpls::for_crate_and_deps_", "TraitImpls::for_crate_", - "impl_trait_with_diagnostics_shim", - "impl_self_ty_with_diagnostics_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", + "impl_trait_with_diagnostics_query", + "impl_self_ty_with_diagnostics_query", + "GenericPredicates::query_with_diagnostics_", ] "#]], ); @@ -671,7 +671,7 @@ fn main() { "AttrFlags::query_", "body_shim", "ImplItems::of_", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "AttrFlags::query_", "trait_signature_with_source_map_shim", "AttrFlags::query_", @@ -683,25 +683,25 @@ fn main() { "AttrFlags::query_", "AttrFlags::query_", "AttrFlags::query_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", - "InferenceResult < 'db >::for_body_", + "GenericPredicates::query_with_diagnostics_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", + "InferenceResult::for_body_", "function_signature_with_source_map_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", "struct_signature_with_source_map_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", + "GenericPredicates::query_with_diagnostics_", "VariantFields::query_", "InherentImpls::for_crate_", "impl_signature_with_source_map_shim", "impl_signature_shim", - "callable_item_signature_shim", + "callable_item_signature_query", "TraitImpls::for_crate_", - "impl_trait_with_diagnostics_shim", - 
"impl_self_ty_with_diagnostics_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", + "impl_trait_with_diagnostics_query", + "impl_self_ty_with_diagnostics_query", + "GenericPredicates::query_with_diagnostics_", ] "#]], ); diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 2f8c31ec6074..ad8de293d5a5 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -21,8 +21,8 @@ use rustc_type_ir::{ use crate::{ db::HirDatabase, next_solver::{ - Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span, Ty, - TyKind, + Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span, + StoredClauses, Ty, TyKind, infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -35,6 +35,31 @@ pub struct ParamEnvAndCrate<'db> { pub krate: Crate, } +impl<'db> ParamEnvAndCrate<'db> { + #[inline] + pub fn store(self) -> StoredParamEnvAndCrate { + StoredParamEnvAndCrate { param_env: self.param_env.clauses.store(), krate: self.krate } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct StoredParamEnvAndCrate { + param_env: StoredClauses, + pub krate: Crate, +} + +impl StoredParamEnvAndCrate { + #[inline] + pub fn param_env(&self) -> ParamEnv<'_> { + ParamEnv { clauses: self.param_env.as_ref() } + } + + #[inline] + pub fn as_ref(&self) -> ParamEnvAndCrate<'_> { + ParamEnvAndCrate { param_env: self.param_env(), krate: self.krate } + } +} + /// This should be used in `hir` only. pub fn structurally_normalize_ty<'db>( infcx: &InferCtxt<'db>, diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs index b32180d41247..bfb9a3f77e94 100644 --- a/crates/hir-ty/src/variance.rs +++ b/crates/hir-ty/src/variance.rs @@ -17,7 +17,7 @@ use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId, signatures::Struct use rustc_ast_ir::Mutability; use rustc_type_ir::{ Variance, - inherent::{AdtDef, IntoKind, SliceLike}, + inherent::{AdtDef, IntoKind}, }; use stdx::never; @@ -25,12 +25,22 @@ use crate::{ db::HirDatabase, generics::{Generics, generics}, next_solver::{ - Const, ConstKind, DbInterner, ExistentialPredicate, GenericArg, GenericArgs, Region, - RegionKind, Term, Ty, TyKind, VariancesOf, + Const, ConstKind, DbInterner, ExistentialPredicate, GenericArgKind, GenericArgs, Region, + RegionKind, StoredVariancesOf, TermKind, Ty, TyKind, VariancesOf, }, }; pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> VariancesOf<'_> { + variances_of_query(db, def).as_ref() +} + +#[salsa::tracked( + returns(ref), + // cycle_fn = crate::variance::variances_of_cycle_fn, + // cycle_initial = crate::variance::variances_of_cycle_initial, + cycle_result = crate::variance::variances_of_cycle_initial, +)] +fn variances_of_query(db: &dyn HirDatabase, def: GenericDefId) -> StoredVariancesOf { tracing::debug!("variances_of(def={:?})", def); let interner = DbInterner::new_no_crate(db); match def { @@ -39,19 +49,19 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances if let AdtId::StructId(id) = adt { let flags = &db.struct_signature(id).flags; if flags.contains(StructFlags::IS_UNSAFE_CELL) { - return VariancesOf::new_from_iter(interner, [Variance::Invariant]); + return VariancesOf::new_from_iter(interner, [Variance::Invariant]).store(); } else if flags.contains(StructFlags::IS_PHANTOM_DATA) { - return VariancesOf::new_from_iter(interner, [Variance::Covariant]); + return VariancesOf::new_from_iter(interner, 
[Variance::Covariant]).store(); } } } - _ => return VariancesOf::new_from_iter(interner, []), + _ => return VariancesOf::empty(interner).store(), } let generics = generics(db, def); let count = generics.len(); if count == 0 { - return VariancesOf::new_from_iter(interner, []); + return VariancesOf::empty(interner).store(); } let mut variances = Context { generics, variances: vec![Variance::Bivariant; count], db }.solve(); @@ -71,7 +81,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances } } - VariancesOf::new_from_iter(interner, variances) + VariancesOf::new_from_iter(interner, variances).store() } // pub(crate) fn variances_of_cycle_fn( @@ -107,13 +117,13 @@ pub(crate) fn variances_of_cycle_initial( db: &dyn HirDatabase, _: salsa::Id, def: GenericDefId, -) -> VariancesOf<'_> { +) -> StoredVariancesOf { let interner = DbInterner::new_no_crate(db); let generics = generics(db, def); let count = generics.len(); // FIXME(next-solver): Returns `Invariance` and not `Bivariance` here, see the comment in the main query. - VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)) + VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)).store() } struct Context<'db> { @@ -131,7 +141,7 @@ impl<'db> Context<'db> { let mut add_constraints_from_variant = |variant| { for (_, field) in db.field_types(variant).iter() { self.add_constraints_from_ty( - field.instantiate_identity(), + field.get().instantiate_identity(), Variance::Covariant, ); } @@ -233,11 +243,11 @@ impl<'db> Context<'db> { } ExistentialPredicate::Projection(projection) => { self.add_constraints_from_invariant_args(projection.args); - match projection.term { - Term::Ty(ty) => { + match projection.term.kind() { + TermKind::Ty(ty) => { self.add_constraints_from_ty(ty, Variance::Invariant) } - Term::Const(konst) => self.add_constraints_from_const(konst), + TermKind::Const(konst) => self.add_constraints_from_const(konst), } } ExistentialPredicate::AutoTrait(_) => {} @@ -267,12 +277,12 @@ impl<'db> Context<'db> { fn add_constraints_from_invariant_args(&mut self, args: GenericArgs<'db>) { for k in args.iter() { - match k { - GenericArg::Lifetime(lt) => { + match k.kind() { + GenericArgKind::Lifetime(lt) => { self.add_constraints_from_region(lt, Variance::Invariant) } - GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, Variance::Invariant), - GenericArg::Const(val) => self.add_constraints_from_const(val), + GenericArgKind::Type(ty) => self.add_constraints_from_ty(ty, Variance::Invariant), + GenericArgKind::Const(val) => self.add_constraints_from_const(val), } } } @@ -291,10 +301,12 @@ impl<'db> Context<'db> { let variances = self.db.variances_of(def_id); for (k, v) in args.iter().zip(variances) { - match k { - GenericArg::Lifetime(lt) => self.add_constraints_from_region(lt, variance.xform(v)), - GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, variance.xform(v)), - GenericArg::Const(val) => self.add_constraints_from_const(val), + match k.kind() { + GenericArgKind::Lifetime(lt) => { + self.add_constraints_from_region(lt, variance.xform(v)) + } + GenericArgKind::Type(ty) => self.add_constraints_from_ty(ty, variance.xform(v)), + GenericArgKind::Const(val) => self.add_constraints_from_const(val), } } } @@ -388,7 +400,7 @@ mod tests { AdtId, GenericDefId, ModuleDefId, hir::generics::GenericParamDataRef, src::HasSource, }; use itertools::Itertools; - use rustc_type_ir::{Variance, inherent::SliceLike}; + use rustc_type_ir::Variance; use stdx::format_to; 
use syntax::{AstNode, ast::HasName}; use test_fixture::WithFixture; diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 6ef6ea272e58..050777a4806d 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -617,7 +617,7 @@ impl<'db> AnyDiagnostic<'db> { pub(crate) fn inference_diagnostic( db: &'db dyn HirDatabase, def: DefWithBodyId, - d: &InferenceDiagnostic<'db>, + d: &InferenceDiagnostic, source_map: &hir_def::expr_store::BodySourceMap, sig_map: &hir_def::expr_store::ExpressionStoreSourceMap, ) -> Option> { @@ -663,7 +663,8 @@ impl<'db> AnyDiagnostic<'db> { } InferenceDiagnostic::ExpectedFunction { call_expr, found } => { let call_expr = expr_syntax(*call_expr)?; - ExpectedFunction { call: call_expr, found: Type::new(db, def, *found) }.into() + ExpectedFunction { call: call_expr, found: Type::new(db, def, found.as_ref()) } + .into() } InferenceDiagnostic::UnresolvedField { expr, @@ -675,7 +676,7 @@ impl<'db> AnyDiagnostic<'db> { UnresolvedField { expr, name: name.clone(), - receiver: Type::new(db, def, *receiver), + receiver: Type::new(db, def, receiver.as_ref()), method_with_same_name_exists: *method_with_same_name_exists, } .into() @@ -691,8 +692,10 @@ impl<'db> AnyDiagnostic<'db> { UnresolvedMethodCall { expr, name: name.clone(), - receiver: Type::new(db, def, *receiver), - field_with_same_name: (*field_with_same_name).map(|ty| Type::new(db, def, ty)), + receiver: Type::new(db, def, receiver.as_ref()), + field_with_same_name: field_with_same_name + .as_ref() + .map(|ty| Type::new(db, def, ty.as_ref())), assoc_func_with_same_name: assoc_func_with_same_name.map(Into::into), } .into() @@ -719,7 +722,7 @@ impl<'db> AnyDiagnostic<'db> { } InferenceDiagnostic::TypedHole { expr, expected } => { let expr = expr_syntax(*expr)?; - TypedHole { expr, expected: Type::new(db, def, *expected) }.into() + TypedHole { expr, expected: Type::new(db, def, expected.as_ref()) }.into() } &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => { let expr_or_pat = match pat { @@ -736,12 +739,12 @@ impl<'db> AnyDiagnostic<'db> { } InferenceDiagnostic::CastToUnsized { expr, cast_ty } => { let expr = expr_syntax(*expr)?; - CastToUnsized { expr, cast_ty: Type::new(db, def, *cast_ty) }.into() + CastToUnsized { expr, cast_ty: Type::new(db, def, cast_ty.as_ref()) }.into() } InferenceDiagnostic::InvalidCast { expr, error, expr_ty, cast_ty } => { let expr = expr_syntax(*expr)?; - let expr_ty = Type::new(db, def, *expr_ty); - let cast_ty = Type::new(db, def, *cast_ty); + let expr_ty = Type::new(db, def, expr_ty.as_ref()); + let cast_ty = Type::new(db, def, cast_ty.as_ref()); InvalidCast { expr, error: *error, expr_ty, cast_ty }.into() } InferenceDiagnostic::TyDiagnostic { source, diag } => { diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ae7e4aa6c8ac..3532220b72ae 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -102,7 +102,7 @@ use rustc_type_ir::{ }; use smallvec::SmallVec; use span::{AstIdNode, Edition, FileId}; -use stdx::{format_to, impl_from, never}; +use stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime}; use syntax::{ AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, ast::{self, HasName, HasVisibility as _}, @@ -697,7 +697,7 @@ impl Module { push_ty_diagnostics( db, acc, - db.field_types_with_diagnostics(s.id.into()).1, + db.field_types_with_diagnostics(s.id.into()).1.clone(), source_map, ); } @@ -709,7 +709,7 @@ impl Module { push_ty_diagnostics( db, 
acc, - db.field_types_with_diagnostics(u.id.into()).1, + db.field_types_with_diagnostics(u.id.into()).1.clone(), source_map, ); } @@ -739,7 +739,7 @@ impl Module { push_ty_diagnostics( db, acc, - db.field_types_with_diagnostics(v.into()).1, + db.field_types_with_diagnostics(v.into()).1.clone(), source_map, ); expr_store_diagnostics(db, acc, source_map); @@ -1219,7 +1219,7 @@ impl<'db> InstantiatedField<'db> { let interner = DbInterner::new_no_crate(db); let var_id = self.inner.parent.into(); - let field = db.field_types(var_id)[self.inner.id]; + let field = db.field_types(var_id)[self.inner.id].get(); let ty = field.instantiate(interner, self.args); TypeNs::new(db, var_id, ty) } @@ -1297,7 +1297,7 @@ impl Field { /// context of the field definition. pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> { let var_id = self.parent.into(); - let ty = db.field_types(var_id)[self.id].skip_binder(); + let ty = db.field_types(var_id)[self.id].get().skip_binder(); TypeNs::new(db, var_id, ty) } @@ -1315,13 +1315,13 @@ impl Field { }; let interner = DbInterner::new_no_crate(db); let args = generic_args_from_tys(interner, def_id.into(), generics.map(|ty| ty.ty)); - let ty = db.field_types(var_id)[self.id].instantiate(interner, args); + let ty = db.field_types(var_id)[self.id].get().instantiate(interner, args); Type::new(db, var_id, ty) } pub fn layout(&self, db: &dyn HirDatabase) -> Result { db.layout_of_ty( - self.ty(db).ty, + self.ty(db).ty.store(), param_env_from_has_crate( db, match hir_def::VariantId::from(self.parent) { @@ -1331,7 +1331,8 @@ impl Field { hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()), }, - ), + ) + .store(), ) .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap())) } @@ -1662,7 +1663,7 @@ impl Variant { self.source(db)?.value.expr() } - pub fn eval(self, db: &dyn HirDatabase) -> Result> { + pub fn eval(self, db: &dyn HirDatabase) -> Result { db.const_eval_discriminant(self.into()) } @@ -1753,7 +1754,7 @@ impl Adt { let args = GenericArgs::for_item_with_defaults(interner, adt_id.into(), |_, id, _| { GenericArg::error_from_id(interner, id) }); - db.layout_of_adt(adt_id, args, param_env_from_has_crate(db, adt_id)) + db.layout_of_adt(adt_id, args.store(), param_env_from_has_crate(db, adt_id).store()) .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap())) } @@ -1988,8 +1989,8 @@ impl DefWithBody { acc.push( TypeMismatch { expr_or_pat, - expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected), - actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual), + expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.as_ref()), + actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.as_ref()), } .into(), ); @@ -2059,7 +2060,10 @@ impl DefWithBody { } mir::MirSpan::Unknown => continue, }; - acc.push(MovedOutOfRef { ty: Type::new_for_crate(krate, moof.ty), span }.into()) + acc.push( + MovedOutOfRef { ty: Type::new_for_crate(krate, moof.ty.as_ref()), span } + .into(), + ) } let mol = &borrowck_result.mutability_of_locals; for (binding_id, binding_data) in body.bindings() { @@ -2468,15 +2472,16 @@ impl Function { self, db: &dyn HirDatabase, span_formatter: impl Fn(FileId, TextRange) -> String, - ) -> Result> { + ) -> Result { let interner = DbInterner::new_no_crate(db); let body = db.monomorphized_mir_body( self.id.into(), - GenericArgs::new_from_iter(interner, []), + 
GenericArgs::empty(interner).store(), ParamEnvAndCrate { param_env: db.trait_environment(self.id.into()), krate: self.id.module(db).krate(db), - }, + } + .store(), )?; let (result, output) = interpret_mir(db, body, false, None)?; let mut text = match result { @@ -2728,11 +2733,14 @@ impl Const { } /// Evaluate the constant. - pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError<'_>> { + pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError> { let interner = DbInterner::new_no_crate(db); let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity(); - db.const_eval(self.id, GenericArgs::new_from_iter(interner, []), None) - .map(|it| EvaluatedConst { const_: it, def: self.id.into(), ty }) + db.const_eval(self.id, GenericArgs::empty(interner), None).map(|it| EvaluatedConst { + const_: it, + def: self.id.into(), + ty, + }) } } @@ -2753,7 +2761,7 @@ impl<'db> EvaluatedConst<'db> { format!("{}", self.const_.display(db, display_target)) } - pub fn render_debug(&self, db: &'db dyn HirDatabase) -> Result> { + pub fn render_debug(&self, db: &'db dyn HirDatabase) -> Result { let kind = self.const_.kind(); if let ConstKind::Value(c) = kind && let ty = c.ty.kind() @@ -2809,7 +2817,7 @@ impl Static { } /// Evaluate the static initializer. - pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError<'_>> { + pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError> { let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity(); db.const_eval_static(self.id).map(|it| EvaluatedConst { const_: it, @@ -3847,7 +3855,7 @@ impl Local { pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> { let def = self.parent; let infer = InferenceResult::for_body(db, def); - let ty = infer[self.binding_id]; + let ty = infer.binding_ty(self.binding_id); Type::new(db, def, ty) } @@ -4152,8 +4160,8 @@ impl TypeParam { pub fn default(self, db: &dyn HirDatabase) -> Option> { let ty = generic_arg_from_param(db, self.id.into())?; let resolver = self.id.parent().resolver(db); - match ty { - GenericArg::Ty(it) if !it.is_ty_error() => { + match ty.kind() { + rustc_type_ir::GenericArgKind::Type(it) if !it.is_ty_error() => { Some(Type::new_with_resolver_inner(db, &resolver, it)) } _ => None, @@ -4545,7 +4553,12 @@ impl<'db> Closure<'db> { info.0 .iter() .cloned() - .map(|capture| ClosureCapture { owner, closure: id, capture }) + .map(|capture| ClosureCapture { + owner, + closure: id, + capture, + _marker: PhantomCovariantLifetime::new(), + }) .collect() } @@ -4650,7 +4663,8 @@ impl FnTrait { pub struct ClosureCapture<'db> { owner: DefWithBodyId, closure: InternedClosureId, - capture: hir_ty::CapturedItem<'db>, + capture: hir_ty::CapturedItem, + _marker: PhantomCovariantLifetime<'db>, } impl<'db> ClosureCapture<'db> { @@ -4917,7 +4931,7 @@ impl<'db> Type<'db> { .fields() .iter() .map(|(idx, _)| { - field_types[idx].instantiate(self.interner, args) + field_types[idx].get().instantiate(self.interner, args) }) .filter(|it| !it.references_non_lt_error()) .collect() @@ -5241,7 +5255,7 @@ impl<'db> Type<'db> { .iter() .map(|(local_id, ty)| { let def = Field { parent: variant_id.into(), id: local_id }; - let ty = ty.instantiate(interner, substs); + let ty = ty.get().instantiate(interner, substs); (def, self.derived(ty)) }) .collect() @@ -5399,12 +5413,14 @@ impl<'db> Type<'db> { .as_adt() .into_iter() .flat_map(|(_, substs)| substs.iter()) - .filter_map(move |arg| match arg { - GenericArg::Ty(ty) => Some(format_smolstr!("{}", ty.display(db, display_target))), - GenericArg::Const(const_) => { 
+ .filter_map(move |arg| match arg.kind() { + rustc_type_ir::GenericArgKind::Type(ty) => { + Some(format_smolstr!("{}", ty.display(db, display_target))) + } + rustc_type_ir::GenericArgKind::Const(const_) => { Some(format_smolstr!("{}", const_.display(db, display_target))) } - GenericArg::Lifetime(_) => None, + rustc_type_ir::GenericArgKind::Lifetime(_) => None, }) } @@ -5808,7 +5824,7 @@ impl<'db> Type<'db> { } pub fn layout(&self, db: &'db dyn HirDatabase) -> Result { - db.layout_of_ty(self.ty, self.env) + db.layout_of_ty(self.ty.store(), self.env.store()) .map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap())) } @@ -5840,7 +5856,7 @@ impl<'db> TypeNs<'db> { pub fn impls_trait(&self, infcx: InferCtxt<'db>, trait_: Trait, args: &[TypeNs<'db>]) -> bool { let args = GenericArgs::new_from_iter( infcx.interner, - [self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(|t| t.into()), + [self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(GenericArg::from), ); let trait_ref = hir_ty::next_solver::TraitRef::new(infcx.interner, trait_.id.into(), args); diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index b65a24d61c6e..fd08528e86ab 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -1653,8 +1653,11 @@ impl<'db> SemanticsImpl<'db> { analyzer.expr_adjustments(expr).map(|it| { it.iter() .map(|adjust| { - let target = - Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target); + let target = Type::new_with_resolver( + self.db, + &analyzer.resolver, + adjust.target.as_ref(), + ); let kind = match adjust.kind { hir_ty::Adjust::NeverToAny => Adjust::NeverToAny, hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => { diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index b90eb97d8791..848ad3380133 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -79,7 +79,7 @@ pub(crate) enum BodyOrSig<'db> { def: DefWithBodyId, body: Arc, source_map: Arc, - infer: Option<&'db InferenceResult<'db>>, + infer: Option<&'db InferenceResult>, }, // To be folded into body once it is considered one VariantFields { @@ -119,7 +119,7 @@ impl<'db> SourceAnalyzer<'db> { def: DefWithBodyId, node @ InFile { file_id, .. }: InFile<&SyntaxNode>, offset: Option, - infer: Option<&'db InferenceResult<'db>>, + infer: Option<&'db InferenceResult>, ) -> SourceAnalyzer<'db> { let (body, source_map) = db.body_with_source_map(def); let scopes = db.expr_scopes(def); @@ -185,9 +185,7 @@ impl<'db> SourceAnalyzer<'db> { } // FIXME: Remove this - fn body_( - &self, - ) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult<'db>>)> { + fn body_(&self) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult>)> { self.body_or_sig.as_ref().and_then(|it| match it { BodyOrSig::Body { def, body, source_map, infer } => { Some((*def, &**body, &**source_map, infer.as_deref())) @@ -196,7 +194,7 @@ impl<'db> SourceAnalyzer<'db> { }) } - fn infer(&self) -> Option<&InferenceResult<'db>> { + fn infer(&self) -> Option<&InferenceResult> { self.body_or_sig.as_ref().and_then(|it| match it { BodyOrSig::Sig { .. } => None, BodyOrSig::VariantFields { .. } => None, @@ -260,7 +258,7 @@ impl<'db> SourceAnalyzer<'db> { if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] 
{ Some(id) } else { None } } - pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment<'db>]> { + pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> { // It is safe to omit destructuring assignments here because they have no adjustments (neither // expressions nor patterns). let expr_id = self.expr_id(expr.clone())?.as_expr()?; @@ -326,8 +324,8 @@ impl<'db> SourceAnalyzer<'db> { let coerced = expr_id .as_expr() .and_then(|expr_id| infer.expr_adjustment(expr_id)) - .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target)); - let ty = infer[expr_id]; + .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.as_ref())); + let ty = infer.expr_or_pat_ty(expr_id); let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty); Some((mk_ty(ty), coerced.map(mk_ty))) } @@ -342,14 +340,15 @@ impl<'db> SourceAnalyzer<'db> { let coerced = match expr_or_pat_id { ExprOrPatId::ExprId(idx) => infer .expr_adjustment(idx) - .and_then(|adjusts| adjusts.last().cloned()) - .map(|adjust| adjust.target), - ExprOrPatId::PatId(idx) => { - infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned()) - } + .and_then(|adjusts| adjusts.last()) + .map(|adjust| adjust.target.as_ref()), + ExprOrPatId::PatId(idx) => infer + .pat_adjustment(idx) + .and_then(|adjusts| adjusts.last()) + .map(|adjust| adjust.as_ref()), }; - let ty = infer[expr_or_pat_id]; + let ty = infer.expr_or_pat_ty(expr_or_pat_id); let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty); Some((mk_ty(ty), coerced.map(mk_ty))) } @@ -361,7 +360,7 @@ impl<'db> SourceAnalyzer<'db> { ) -> Option> { let binding_id = self.binding_id_of_pat(pat)?; let infer = self.infer()?; - let ty = infer[binding_id]; + let ty = infer.binding_ty(binding_id); let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty); Some(mk_ty(ty)) } @@ -372,7 +371,7 @@ impl<'db> SourceAnalyzer<'db> { _param: &ast::SelfParam, ) -> Option> { let binding = self.body()?.self_param?; - let ty = self.infer()?[binding]; + let ty = self.infer()?.binding_ty(binding); Some(Type::new_with_resolver(db, &self.resolver, ty)) } @@ -404,7 +403,7 @@ impl<'db> SourceAnalyzer<'db> { infer .pat_adjustment(pat_id.as_pat()?)? .iter() - .map(|ty| Type::new_with_resolver(db, &self.resolver, *ty)) + .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.as_ref())) .collect(), ) } @@ -482,7 +481,7 @@ impl<'db> SourceAnalyzer<'db> { fn field_subst( &self, field_expr: ExprId, - infer: &InferenceResult<'db>, + infer: &InferenceResult, db: &'db dyn HirDatabase, ) -> Option> { let body = self.store()?; @@ -598,8 +597,7 @@ impl<'db> SourceAnalyzer<'db> { let poll_fn = self.lang_items(db).FuturePoll?; // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself // doesn't have any generic parameters, so we skip building another subst for `poll()`. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [ty.into()]); + let substs = GenericArgs::new_from_slice(&[ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs)) } @@ -641,10 +639,9 @@ impl<'db> SourceAnalyzer<'db> { let ty = self.ty_of_expr(prefix_expr.expr()?)?; - let interner = DbInterner::new_no_crate(db); // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. 
- let substs = GenericArgs::new_from_iter(interner, [ty.into()]); + let substs = GenericArgs::new_from_slice(&[ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -674,8 +671,7 @@ impl<'db> SourceAnalyzer<'db> { .unwrap_or(index_fn); // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [base_ty.into(), index_ty.into()]); + let substs = GenericArgs::new_from_slice(&[base_ty.into(), index_ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -694,8 +690,7 @@ impl<'db> SourceAnalyzer<'db> { })?; // HACK: subst for `index()` coincides with that for `Index` because `index()` itself // doesn't have any generic parameters, so we skip building another subst for `index()`. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [lhs.into(), rhs.into()]); + let substs = GenericArgs::new_from_slice(&[lhs.into(), rhs.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -710,8 +705,7 @@ impl<'db> SourceAnalyzer<'db> { let op_fn = self.lang_items(db).TryTraitBranch?; // HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself // doesn't have any generic parameters, so we skip building another subst for `branch()`. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [ty.into()]); + let substs = GenericArgs::new_from_slice(&[ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -751,7 +745,8 @@ impl<'db> SourceAnalyzer<'db> { let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?; let variant_data = variant.fields(db); let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? }; - let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst); + let field_ty = + (*db.field_types(variant).get(field.local_id)?).get().instantiate(interner, subst); Some(( field.into(), local, @@ -772,8 +767,9 @@ impl<'db> SourceAnalyzer<'db> { let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?; let variant_data = variant.fields(db); let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? 
}; - let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?; - let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst); + let (adt, subst) = self.infer()?.pat_ty(pat_id.as_pat()?).as_adt()?; + let field_ty = + (*db.field_types(variant).get(field.local_id)?).get().instantiate(interner, subst); Some(( field.into(), Type::new_with_resolver(db, &self.resolver, field_ty), @@ -835,23 +831,24 @@ impl<'db> SourceAnalyzer<'db> { if let Either::Right(container) = &mut container { *container = structurally_normalize_ty(&infcx, *container, trait_env.param_env); } - let handle_variants = - |variant: VariantId, subst: GenericArgs<'db>, container: &mut _| { - let fields = variant.fields(db); - let field = fields.field(&field_name.as_name())?; - let field_types = db.field_types(variant); - *container = Either::Right(field_types[field].instantiate(interner, subst)); - let generic_def = match variant { - VariantId::EnumVariantId(it) => it.loc(db).parent.into(), - VariantId::StructId(it) => it.into(), - VariantId::UnionId(it) => it.into(), - }; - Some(( - Either::Right(Field { parent: variant.into(), id: field }), - generic_def, - subst, - )) + let handle_variants = |variant: VariantId, + subst: GenericArgs<'db>, + container: &mut _| { + let fields = variant.fields(db); + let field = fields.field(&field_name.as_name())?; + let field_types = db.field_types(variant); + *container = Either::Right(field_types[field].get().instantiate(interner, subst)); + let generic_def = match variant { + VariantId::EnumVariantId(it) => it.loc(db).parent.into(), + VariantId::StructId(it) => it.into(), + VariantId::UnionId(it) => it.into(), }; + Some(( + Either::Right(Field { parent: variant.into(), id: field }), + generic_def, + subst, + )) + }; let temp_ty = Ty::new_error(interner, ErrorGuaranteed); let (field_def, generic_def, subst) = match std::mem::replace(&mut container, Either::Right(temp_ty)) { @@ -1173,7 +1170,7 @@ impl<'db> SourceAnalyzer<'db> { self.infer()?.type_of_expr_or_pat(expr_id)? 
} else if let Some(pat) = ast::Pat::cast(parent) { let pat_id = self.pat_id(&pat)?; - self.infer()?[pat_id] + self.infer()?.expr_or_pat_ty(pat_id) } else { return None; }; @@ -1245,7 +1242,7 @@ impl<'db> SourceAnalyzer<'db> { let infer = self.infer()?; let expr_id = self.expr_id(literal.clone().into())?; - let substs = infer[expr_id].as_adt()?.1; + let substs = infer.expr_or_pat_ty(expr_id).as_adt()?.1; let (variant, missing_fields, _exhaustive) = match expr_id { ExprOrPatId::ExprId(expr_id) => { @@ -1268,7 +1265,7 @@ impl<'db> SourceAnalyzer<'db> { let infer = self.infer()?; let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?; - let substs = infer[pat_id].as_adt()?.1; + let substs = infer.pat_ty(pat_id).as_adt()?.1; let (variant, missing_fields, _exhaustive) = record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?; @@ -1290,7 +1287,7 @@ impl<'db> SourceAnalyzer<'db> { .into_iter() .map(|local_id| { let field = FieldId { parent: variant, local_id }; - let ty = field_types[local_id].instantiate(interner, substs); + let ty = field_types[local_id].get().instantiate(interner, substs); (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty)) }) .collect() diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs index 951a672d4b79..1317684a0877 100644 --- a/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -46,7 +46,7 @@ pub(super) fn hints( if !place.projection.is_empty() { continue; // Ignore complex cases for now } - if mir.locals[place.local].ty.as_adt().is_none() { + if mir.locals[place.local].ty.as_ref().as_adt().is_none() { continue; // Arguably only ADTs have significant drop impls } let Some(&binding_idx) = local_to_binding.get(place.local) else { diff --git a/crates/ide/src/interpret.rs b/crates/ide/src/interpret.rs index 791da00bb695..3741822547e4 100644 --- a/crates/ide/src/interpret.rs +++ b/crates/ide/src/interpret.rs @@ -60,7 +60,7 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura pub(crate) fn render_const_eval_error( db: &RootDatabase, - e: ConstEvalError<'_>, + e: ConstEvalError, display_target: DisplayTarget, ) -> String { let span_formatter = |file_id, text_range: TextRange| { diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index 81b6703deef5..44621031ab8e 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -18,6 +18,7 @@ dashmap.workspace = true hashbrown.workspace = true rustc-hash.workspace = true triomphe.workspace = true +smallvec.workspace = true [lints] workspace = true diff --git a/crates/intern/src/intern.rs b/crates/intern/src/intern.rs new file mode 100644 index 000000000000..fdefb936560e --- /dev/null +++ b/crates/intern/src/intern.rs @@ -0,0 +1,331 @@ +//! Interning of single values. 
+ +use std::{ + fmt::{self, Debug, Display}, + hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, + ops::Deref, + ptr, + sync::OnceLock, +}; + +use dashmap::{DashMap, SharedValue}; +use hashbrown::raw::RawTable; +use rustc_hash::FxHasher; +use triomphe::{Arc, ArcBorrow}; + +type InternMap = DashMap, (), BuildHasherDefault>; +type Guard = dashmap::RwLockWriteGuard<'static, RawTable<(Arc, SharedValue<()>)>>; + +pub struct Interned { + arc: Arc, +} + +unsafe impl Send for Interned {} +unsafe impl Sync for Interned {} + +impl Interned { + #[inline] + pub fn new(obj: T) -> Self { + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &obj); + // Atomically, + // - check if `obj` is already in the map + // - if so, clone its `Arc` and return it + // - if not, box it up, insert it, and return a clone + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| **other == obj, + |(x, _)| Self::hash(storage, x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot(hash, insert_slot, (Arc::new(obj), SharedValue::new(()))) + }, + }; + // SAFETY: We just retrieved/inserted this bucket. + unsafe { Self { arc: bucket.as_ref().0.clone() } } + } + + #[inline] + pub fn new_gc<'a>(obj: T) -> InternedRef<'a, T> { + const { assert!(T::USE_GC) }; + + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &obj); + // Atomically, + // - check if `obj` is already in the map + // - if so, clone its `Arc` and return it + // - if not, box it up, insert it, and return a clone + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| **other == obj, + |(x, _)| Self::hash(storage, x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot(hash, insert_slot, (Arc::new(obj), SharedValue::new(()))) + }, + }; + // SAFETY: We just retrieved/inserted this bucket. + unsafe { InternedRef { arc: Arc::borrow_arc(&bucket.as_ref().0) } } + } + + #[inline] + fn select(storage: &'static InternMap, obj: &T) -> (Guard, u64) { + let hash = Self::hash(storage, obj); + let shard_idx = storage.determine_shard(hash as usize); + let shard = &storage.shards()[shard_idx]; + (shard.write(), hash) + } + + #[inline] + fn hash(storage: &'static InternMap, obj: &T) -> u64 { + storage.hasher().hash_one(obj) + } + + /// # Safety + /// + /// The pointer should originate from an `Interned` or an `InternedRef`. + #[inline] + pub unsafe fn from_raw(ptr: *const T) -> Self { + Self { arc: unsafe { Arc::from_raw(ptr) } } + } + + #[inline] + pub fn as_ref(&self) -> InternedRef<'_, T> { + InternedRef { arc: self.arc.borrow_arc() } + } +} + +impl Drop for Interned { + #[inline] + fn drop(&mut self) { + // When the last `Ref` is dropped, remove the object from the global map. + if !T::USE_GC && Arc::count(&self.arc) == 2 { + // Only `self` and the global map point to the object. 
+ + self.drop_slow(); + } + } +} + +impl Interned { + #[cold] + fn drop_slow(&mut self) { + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &self.arc); + + if Arc::count(&self.arc) != 2 { + // Another thread has interned another copy + return; + } + + shard.remove_entry(hash, |(other, _)| **other == **self); + + // Shrink the backing storage if the shard is less than 50% occupied. + if shard.len() * 2 < shard.capacity() { + let len = shard.len(); + shard.shrink_to(len, |(x, _)| Self::hash(storage, x)); + } + } +} + +/// Compares interned `Ref`s using pointer equality. +impl PartialEq for Interned { + // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. + + #[inline] + fn eq(&self, other: &Self) -> bool { + Arc::ptr_eq(&self.arc, &other.arc) + } +} + +impl Eq for Interned {} + +impl Hash for Interned { + #[inline] + fn hash(&self, state: &mut H) { + state.write_usize(self.arc.as_ptr().addr()) + } +} + +impl AsRef for Interned { + #[inline] + fn as_ref(&self) -> &T { + self + } +} + +impl Deref for Interned { + type Target = T; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.arc + } +} + +impl Clone for Interned { + #[inline] + fn clone(&self) -> Self { + Self { arc: self.arc.clone() } + } +} + +impl Debug for Interned { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ::fmt(&**self, f) + } +} + +impl Display for Interned { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ::fmt(&**self, f) + } +} + +#[repr(transparent)] +pub struct InternedRef<'a, T> { + arc: ArcBorrow<'a, T>, +} + +impl<'a, T: Internable> InternedRef<'a, T> { + #[inline] + pub fn as_raw(self) -> *const T { + // Not `ptr::from_ref(&*self.arc)`, because we need to keep the provenance. + self.arc.with_arc(|arc| Arc::as_ptr(arc)) + } + + /// # Safety + /// + /// The pointer needs to originate from `Interned` or `InternedRef`. + #[inline] + pub unsafe fn from_raw(ptr: *const T) -> Self { + Self { arc: unsafe { ArcBorrow::from_ptr(ptr) } } + } + + #[inline] + pub fn to_owned(self) -> Interned { + Interned { arc: self.arc.clone_arc() } + } + + #[inline] + pub fn get(self) -> &'a T { + self.arc.get() + } + + /// # Safety + /// + /// You have to make sure the data is not referenced after the refcount reaches zero; beware the interning + /// map also keeps a reference to the value. + #[inline] + pub unsafe fn decrement_refcount(self) { + unsafe { drop(Arc::from_raw(self.as_raw())) } + } +} + +impl Clone for InternedRef<'_, T> { + #[inline] + fn clone(&self) -> Self { + *self + } +} + +impl Copy for InternedRef<'_, T> {} + +impl Hash for InternedRef<'_, T> { + #[inline] + fn hash(&self, state: &mut H) { + let ptr = ptr::from_ref::(&*self.arc); + state.write_usize(ptr.addr()); + } +} + +impl PartialEq for InternedRef<'_, T> { + #[inline] + fn eq(&self, other: &Self) -> bool { + ArcBorrow::ptr_eq(&self.arc, &other.arc) + } +} + +impl Eq for InternedRef<'_, T> {} + +impl Deref for InternedRef<'_, T> { + type Target = T; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.arc + } +} + +impl Debug for InternedRef<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self.arc).fmt(f) + } +} + +impl Display for InternedRef<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self.arc).fmt(f) + } +} + +pub struct InternStorage { + map: OnceLock>, +} + +#[allow( + clippy::new_without_default, + reason = "this a const fn, so it can't be default yet. 
See " +)] +impl InternStorage { + pub const fn new() -> Self { + Self { map: OnceLock::new() } + } +} + +impl InternStorage { + fn get(&self) -> &InternMap { + self.map.get_or_init(DashMap::default) + } +} + +pub trait Internable: Hash + Eq + 'static { + const USE_GC: bool; + + fn storage() -> &'static InternStorage; +} + +/// Implements `Internable` for a given list of types, making them usable with `Interned`. +#[macro_export] +#[doc(hidden)] +macro_rules! _impl_internable { + ( gc; $($t:ty),+ $(,)? ) => { $( + impl $crate::Internable for $t { + const USE_GC: bool = true; + + fn storage() -> &'static $crate::InternStorage { + static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); + &STORAGE + } + } + )+ }; + ( $($t:ty),+ $(,)? ) => { $( + impl $crate::Internable for $t { + const USE_GC: bool = false; + + fn storage() -> &'static $crate::InternStorage { + static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); + &STORAGE + } + } + )+ }; +} +pub use crate::_impl_internable as impl_internable; diff --git a/crates/intern/src/intern_slice.rs b/crates/intern/src/intern_slice.rs new file mode 100644 index 000000000000..989a21e70cf0 --- /dev/null +++ b/crates/intern/src/intern_slice.rs @@ -0,0 +1,351 @@ +//! Interning of slices, potentially with a header. + +use std::{ + borrow::Borrow, + ffi::c_void, + fmt::{self, Debug}, + hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, + marker::PhantomData, + mem::ManuallyDrop, + ops::Deref, + ptr::{self, NonNull}, + sync::OnceLock, +}; + +use dashmap::{DashMap, SharedValue}; +use hashbrown::raw::RawTable; +use rustc_hash::FxHasher; +use triomphe::{HeaderSlice, HeaderWithLength, ThinArc}; + +type InternMap = DashMap< + ThinArc<::Header, ::SliceType>, + (), + BuildHasherDefault, +>; +type Guard = dashmap::RwLockWriteGuard< + 'static, + RawTable<( + ThinArc<::Header, ::SliceType>, + SharedValue<()>, + )>, +>; +type Pointee = HeaderSlice< + HeaderWithLength<::Header>, + [::SliceType], +>; + +pub struct InternedSlice { + arc: ThinArc, +} + +impl InternedSlice { + #[inline] + fn new<'a>( + header: T::Header, + slice: impl Borrow<[T::SliceType]> + + IntoIterator, + ) -> InternedSliceRef<'a, T> { + const { assert!(T::USE_GC) }; + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &header, slice.borrow()); + // Atomically, + // - check if `obj` is already in the map + // - if so, clone its `Arc` and return it + // - if not, box it up, insert it, and return a clone + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| other.header.header == header && other.slice == *slice.borrow(), + |(x, _)| storage.hasher().hash_one(x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot( + hash, + insert_slot, + ( + ThinArc::from_header_and_iter(header, slice.into_iter()), + SharedValue::new(()), + ), + ) + }, + }; + // SAFETY: We just retrieved/inserted this bucket. 
+ unsafe { + InternedSliceRef { + ptr: NonNull::new_unchecked(ThinArc::as_ptr(&bucket.as_ref().0).cast_mut()), + _marker: PhantomData, + } + } + } + + #[inline] + pub fn from_header_and_slice<'a>( + header: T::Header, + slice: &[T::SliceType], + ) -> InternedSliceRef<'a, T> + where + T::SliceType: Clone, + { + return Self::new(header, Iter(slice)); + + struct Iter<'a, T>(&'a [T]); + + impl<'a, T: Clone> IntoIterator for Iter<'a, T> { + type IntoIter = std::iter::Cloned>; + type Item = T; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.0.iter().cloned() + } + } + + impl Borrow<[T]> for Iter<'_, T> { + #[inline] + fn borrow(&self) -> &[T] { + self.0 + } + } + } + + #[inline] + fn select( + storage: &'static InternMap, + header: &T::Header, + slice: &[T::SliceType], + ) -> (Guard, u64) { + let hash = Self::hash(storage, header, slice); + let shard_idx = storage.determine_shard(hash as usize); + let shard = &storage.shards()[shard_idx]; + (shard.write(), hash) + } + + #[inline] + fn hash(storage: &'static InternMap, header: &T::Header, slice: &[T::SliceType]) -> u64 { + storage.hasher().hash_one(HeaderSlice { + header: HeaderWithLength { header, length: slice.len() }, + slice, + }) + } + + #[inline(always)] + fn ptr(&self) -> *const c_void { + unsafe { ptr::from_ref(&self.arc).read().into_raw() } + } + + #[inline] + pub fn as_ref(&self) -> InternedSliceRef<'_, T> { + InternedSliceRef { + ptr: unsafe { NonNull::new_unchecked(self.ptr().cast_mut()) }, + _marker: PhantomData, + } + } +} + +impl Drop for InternedSlice { + #[inline] + fn drop(&mut self) { + // When the last `Ref` is dropped, remove the object from the global map. + if !T::USE_GC && ThinArc::strong_count(&self.arc) == 2 { + // Only `self` and the global map point to the object. + + self.drop_slow(); + } + } +} + +impl InternedSlice { + #[cold] + fn drop_slow(&mut self) { + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &self.arc.header.header, &self.arc.slice); + + if ThinArc::strong_count(&self.arc) != 2 { + // Another thread has interned another copy + return; + } + + shard.remove_entry(hash, |(other, _)| **other == *self.arc); + + // Shrink the backing storage if the shard is less than 50% occupied. + if shard.len() * 2 < shard.capacity() { + let len = shard.len(); + shard.shrink_to(len, |(x, _)| storage.hasher().hash_one(x)); + } + } +} + +/// Compares interned `Ref`s using pointer equality. +impl PartialEq for InternedSlice { + // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. 
+ + #[inline] + fn eq(&self, other: &Self) -> bool { + self.arc.as_ptr() == other.arc.as_ptr() + } +} + +impl Eq for InternedSlice {} + +impl Hash for InternedSlice { + #[inline] + fn hash(&self, state: &mut H) { + state.write_usize(self.ptr().addr()) + } +} + +impl Deref for InternedSlice { + type Target = Pointee; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.arc + } +} + +impl Clone for InternedSlice { + #[inline] + fn clone(&self) -> Self { + Self { arc: self.arc.clone() } + } +} + +impl Debug for InternedSlice +where + T: SliceInternable, + T::SliceType: Debug, + T::Header: Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self.arc).fmt(f) + } +} + +#[repr(transparent)] +pub struct InternedSliceRef<'a, T> { + ptr: NonNull, + _marker: PhantomData<&'a T>, +} + +unsafe impl Send for InternedSliceRef<'_, T> {} +unsafe impl Sync for InternedSliceRef<'_, T> {} + +impl<'a, T: SliceInternable> InternedSliceRef<'a, T> { + #[inline(always)] + fn arc(self) -> ManuallyDrop> { + unsafe { ManuallyDrop::new(ThinArc::from_raw(self.ptr.as_ptr())) } + } + + #[inline] + pub fn to_owned(self) -> InternedSlice { + InternedSlice { arc: (*self.arc()).clone() } + } + + #[inline] + pub fn get(self) -> &'a Pointee { + unsafe { &*ptr::from_ref::>(&*self.arc()) } + } + + /// # Safety + /// + /// You have to make sure the data is not referenced after the refcount reaches zero; beware the interning + /// map also keeps a reference to the value. + #[inline] + pub unsafe fn decrement_refcount(self) { + drop(ManuallyDrop::into_inner(self.arc())); + } +} + +impl Clone for InternedSliceRef<'_, T> { + #[inline] + fn clone(&self) -> Self { + *self + } +} + +impl Copy for InternedSliceRef<'_, T> {} + +impl Hash for InternedSliceRef<'_, T> { + #[inline] + fn hash(&self, state: &mut H) { + state.write_usize(self.ptr.as_ptr().addr()); + } +} + +impl PartialEq for InternedSliceRef<'_, T> { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.ptr == other.ptr + } +} + +impl Eq for InternedSliceRef<'_, T> {} + +impl Deref for InternedSliceRef<'_, T> { + type Target = Pointee; + + #[inline] + fn deref(&self) -> &Self::Target { + self.get() + } +} + +impl Debug for InternedSliceRef<'_, T> +where + T: SliceInternable, + T::SliceType: Debug, + T::Header: Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +pub struct InternSliceStorage { + map: OnceLock>, +} + +#[allow( + clippy::new_without_default, + reason = "this a const fn, so it can't be default yet. See " +)] +impl InternSliceStorage { + pub const fn new() -> Self { + Self { map: OnceLock::new() } + } +} + +impl InternSliceStorage { + fn get(&self) -> &InternMap { + self.map.get_or_init(DashMap::default) + } +} + +pub trait SliceInternable: Sized + 'static { + const USE_GC: bool; + type Header: Eq + Hash; + type SliceType: Eq + Hash + 'static; + fn storage() -> &'static InternSliceStorage; +} + +/// Implements `SliceInternable` for a given list of types, making them usable with `InternedSlice`. +#[macro_export] +#[doc(hidden)] +macro_rules! _impl_slice_internable { + ( gc; $tag:ident, $h:ty, $t:ty $(,)? 
) => { + pub struct $tag; + impl $crate::SliceInternable for $tag { + const USE_GC: bool = true; + type Header = $h; + type SliceType = $t; + fn storage() -> &'static $crate::InternSliceStorage { + static STORAGE: $crate::InternSliceStorage<$tag> = + $crate::InternSliceStorage::new(); + &STORAGE + } + } + }; +} +pub use crate::_impl_slice_internable as impl_slice_internable; diff --git a/crates/intern/src/lib.rs b/crates/intern/src/lib.rs index 398d224c07ad..c5b42a860ac4 100644 --- a/crates/intern/src/lib.rs +++ b/crates/intern/src/lib.rs @@ -2,219 +2,12 @@ //! //! Eventually this should probably be replaced with salsa-based interning. -use std::{ - borrow::Borrow, - fmt::{self, Debug, Display}, - hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, - ops::Deref, - sync::OnceLock, -}; - -use dashmap::{DashMap, SharedValue}; -use hashbrown::raw::RawTable; -use rustc_hash::FxHasher; -use triomphe::Arc; - -type InternMap = DashMap, (), BuildHasherDefault>; -type Guard = dashmap::RwLockWriteGuard<'static, RawTable<(Arc, SharedValue<()>)>>; - +mod intern; +mod intern_slice; mod symbol; -pub use self::symbol::{Symbol, symbols as sym}; - -pub struct Interned { - arc: Arc, -} - -impl Interned { - #[inline] - pub fn new(obj: T) -> Self { - Self::new_generic(obj) - } -} - -impl Interned { - #[inline] - pub fn new_str(s: &str) -> Self { - Self::new_generic(s) - } -} - -impl Interned { - #[inline] - pub fn new_generic(obj: U) -> Self - where - U: Borrow, - Arc: From, - { - let storage = T::storage().get(); - let (mut shard, hash) = Self::select(storage, obj.borrow()); - // Atomically, - // - check if `obj` is already in the map - // - if so, clone its `Arc` and return it - // - if not, box it up, insert it, and return a clone - // This needs to be atomic (locking the shard) to avoid races with other thread, which could - // insert the same object between us looking it up and inserting it. - let bucket = match shard.find_or_find_insert_slot( - hash, - |(other, _)| **other == *obj.borrow(), - |(x, _)| Self::hash(storage, x), - ) { - Ok(bucket) => bucket, - // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. - Err(insert_slot) => unsafe { - shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(()))) - }, - }; - // SAFETY: We just retrieved/inserted this bucket. - unsafe { Self { arc: bucket.as_ref().0.clone() } } - } - - #[inline] - fn select(storage: &'static InternMap, obj: &T) -> (Guard, u64) { - let hash = Self::hash(storage, obj); - let shard_idx = storage.determine_shard(hash as usize); - let shard = &storage.shards()[shard_idx]; - (shard.write(), hash) - } - - #[inline] - fn hash(storage: &'static InternMap, obj: &T) -> u64 { - storage.hasher().hash_one(obj) - } -} - -impl Drop for Interned { - #[inline] - fn drop(&mut self) { - // When the last `Ref` is dropped, remove the object from the global map. - if Arc::count(&self.arc) == 2 { - // Only `self` and the global map point to the object. - - self.drop_slow(); - } - } -} - -impl Interned { - #[cold] - fn drop_slow(&mut self) { - let storage = T::storage().get(); - let (mut shard, hash) = Self::select(storage, &self.arc); - - if Arc::count(&self.arc) != 2 { - // Another thread has interned another copy - return; - } - - shard.remove_entry(hash, |(other, _)| **other == *self.arc); - - // Shrink the backing storage if the shard is less than 50% occupied. 
- if shard.len() * 2 < shard.capacity() { - let len = shard.len(); - shard.shrink_to(len, |(x, _)| Self::hash(storage, x)); - } - } -} - -/// Compares interned `Ref`s using pointer equality. -impl PartialEq for Interned { - // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. - - #[inline] - fn eq(&self, other: &Self) -> bool { - Arc::ptr_eq(&self.arc, &other.arc) - } -} -impl Eq for Interned {} - -impl PartialEq for Interned { - fn eq(&self, other: &Self) -> bool { - Arc::ptr_eq(&self.arc, &other.arc) - } -} - -impl Eq for Interned {} - -impl Hash for Interned { - fn hash(&self, state: &mut H) { - // NOTE: Cast disposes vtable pointer / slice/str length. - state.write_usize(Arc::as_ptr(&self.arc) as *const () as usize) - } -} - -impl AsRef for Interned { - #[inline] - fn as_ref(&self) -> &T { - &self.arc - } -} - -impl Deref for Interned { - type Target = T; - - #[inline] - fn deref(&self) -> &Self::Target { - &self.arc - } -} - -impl Clone for Interned { - fn clone(&self) -> Self { - Self { arc: self.arc.clone() } - } -} - -impl Debug for Interned { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - (*self.arc).fmt(f) - } -} - -impl Display for Interned { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - (*self.arc).fmt(f) - } -} - -pub struct InternStorage { - map: OnceLock>, -} - -#[allow( - clippy::new_without_default, - reason = "this a const fn, so it can't be default yet. See " -)] -impl InternStorage { - pub const fn new() -> Self { - Self { map: OnceLock::new() } - } -} - -impl InternStorage { - fn get(&self) -> &InternMap { - self.map.get_or_init(DashMap::default) - } -} - -pub trait Internable: Hash + Eq + 'static { - fn storage() -> &'static InternStorage; -} - -/// Implements `Internable` for a given list of types, making them usable with `Interned`. -#[macro_export] -#[doc(hidden)] -macro_rules! _impl_internable { - ( $($t:path),+ $(,)? ) => { $( - impl $crate::Internable for $t { - fn storage() -> &'static $crate::InternStorage { - static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); - &STORAGE - } - } - )+ }; -} - -pub use crate::_impl_internable as impl_internable; - -impl_internable!(str,); +pub use self::intern::{InternStorage, Internable, Interned, InternedRef, impl_internable}; +pub use self::intern_slice::{ + InternSliceStorage, InternedSlice, InternedSliceRef, SliceInternable, impl_slice_internable, +}; +pub use self::symbol::{Symbol, symbols as sym}; diff --git a/crates/macros/src/lib.rs b/crates/macros/src/lib.rs index 3f90ecc8f902..de8c3f2e55f1 100644 --- a/crates/macros/src/lib.rs +++ b/crates/macros/src/lib.rs @@ -25,6 +25,9 @@ decl_derive!( /// visited (and its type is not required to implement `TypeVisitable`). type_visitable_derive ); +decl_derive!( + [GenericTypeVisitable] => generic_type_visitable_derive +); fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { if let syn::Data::Union(_) = s.ast().data { @@ -163,6 +166,33 @@ fn has_ignore_attr(attrs: &[syn::Attribute], name: &'static str, meta: &'static ignored } +fn generic_type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { + if let syn::Data::Union(_) = s.ast().data { + panic!("cannot derive on union") + } + + s.add_bounds(synstructure::AddBounds::Fields); + s.bind_with(|_| synstructure::BindStyle::Move); + s.add_impl_generic(parse_quote!(__V: hir_ty::next_solver::interner::WorldExposer)); + let body_visit = s.each(|bind| { + quote! 
{ + ::rustc_type_ir::GenericTypeVisitable::<__V>::generic_visit_with(#bind, __visitor); + } + }); + + s.bound_impl( + quote!(::rustc_type_ir::GenericTypeVisitable<__V>), + quote! { + fn generic_visit_with( + &self, + __visitor: &mut __V + ) { + match self { #body_visit } + } + }, + ) +} + decl_derive!( [UpmapFromRaFixture] => upmap_from_ra_fixture ); diff --git a/crates/query-group-macro/src/queries.rs b/crates/query-group-macro/src/queries.rs index fe932ac133a0..83ce8902d0c9 100644 --- a/crates/query-group-macro/src/queries.rs +++ b/crates/query-group-macro/src/queries.rs @@ -48,8 +48,7 @@ impl ToTokens for TrackedQuery { quote!(#(#options),*) }) .into_iter() - .chain(self.lru.map(|lru| quote!(lru = #lru))) - .chain(Some(quote!(unsafe(non_update_types)))); + .chain(self.lru.map(|lru| quote!(lru = #lru))); let annotation = quote!(#[salsa_macros::tracked( #(#options),* )]); let pat_and_tys = &self.pat_and_tys; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 1a6cd784cf2f..f863a7ee8d04 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -390,11 +390,12 @@ impl flags::AnalysisStats { all += 1; let Err(e) = db.layout_of_adt( hir_def::AdtId::from(a), - GenericArgs::new_from_iter(interner, []), + GenericArgs::empty(interner).store(), hir_ty::ParamEnvAndCrate { param_env: db.trait_environment(a.into()), krate: a.krate(db).into(), - }, + } + .store(), ) else { continue; }; @@ -830,7 +831,7 @@ impl flags::AnalysisStats { let (previous_exprs, previous_unknown, previous_partially_unknown) = (num_exprs, num_exprs_unknown, num_exprs_partially_unknown); for (expr_id, _) in body.exprs() { - let ty = &inference_result[expr_id]; + let ty = inference_result.expr_ty(expr_id); num_exprs += 1; let unknown_or_partial = if ty.is_ty_error() { num_exprs_unknown += 1; @@ -897,15 +898,15 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } else { bar.println(format!( "{}: Expected {}, got {}", name.display(db, Edition::LATEST), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } } @@ -913,8 +914,8 @@ impl flags::AnalysisStats { println!( r#"{},mismatch,"{}","{}""#, location_csv_expr(db, vfs, &sm(), expr_id), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) ); } } @@ -934,7 +935,7 @@ impl flags::AnalysisStats { let (previous_pats, previous_unknown, previous_partially_unknown) = (num_pats, num_pats_unknown, num_pats_partially_unknown); for (pat_id, _) in body.pats() { - let ty = &inference_result[pat_id]; + let ty = inference_result.pat_ty(pat_id); num_pats += 1; let unknown_or_partial = if ty.is_ty_error() { num_pats_unknown += 1; @@ -999,15 +1000,15 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } else { bar.println(format!( 
"{}: Expected {}, got {}", name.display(db, Edition::LATEST), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } } @@ -1015,8 +1016,8 @@ impl flags::AnalysisStats { println!( r#"{},mismatch,"{}","{}""#, location_csv_pat(db, vfs, &sm(), pat_id), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) ); } } From 36c9f6224c743c2c6858fa69cecd701716888a33 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Wed, 10 Dec 2025 08:44:05 +0200 Subject: [PATCH 2/3] GC support for solver types A GC is triggered every X revisions, and is synchronous, unfortunately. --- Cargo.lock | 1 + crates/hir-ty/src/next_solver/consts.rs | 24 +- crates/hir-ty/src/next_solver/interner.rs | 132 ++++++- crates/hir-ty/src/next_solver/opaques.rs | 5 +- crates/hir-ty/src/next_solver/predicate.rs | 31 +- crates/hir-ty/src/next_solver/region.rs | 8 +- crates/hir-ty/src/next_solver/ty.rs | 8 +- crates/hir/src/lib.rs | 2 +- crates/intern/Cargo.toml | 1 + crates/intern/src/gc.rs | 325 ++++++++++++++++++ crates/intern/src/intern.rs | 11 +- crates/intern/src/intern_slice.rs | 17 +- crates/intern/src/lib.rs | 2 + .../rust-analyzer/src/cli/analysis_stats.rs | 1 + crates/rust-analyzer/src/config.rs | 13 +- crates/rust-analyzer/src/global_state.rs | 13 + docs/book/src/configuration_generated.md | 11 + editors/code/package.json | 11 + 18 files changed, 566 insertions(+), 50 deletions(-) create mode 100644 crates/intern/src/gc.rs diff --git a/Cargo.lock b/Cargo.lock index 081c7d66f1d8..7ada91d2b65b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1214,6 +1214,7 @@ version = "0.0.0" dependencies = [ "dashmap", "hashbrown 0.14.5", + "rayon", "rustc-hash 2.1.1", "smallvec", "triomphe", diff --git a/crates/hir-ty/src/next_solver/consts.rs b/crates/hir-ty/src/next_solver/consts.rs index 8417e145a46d..c2fc4d18bbae 100644 --- a/crates/hir-ty/src/next_solver/consts.rs +++ b/crates/hir-ty/src/next_solver/consts.rs @@ -29,9 +29,9 @@ pub struct Const<'db> { pub(super) interned: InternedRef<'db, ConstInterned>, } -#[derive(PartialEq, Eq, Hash)] +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] #[repr(align(4))] // Required for `GenericArg` bit-tagging. 
-pub(super) struct ConstInterned(WithCachedTypeInfo>); +pub(super) struct ConstInterned(pub(super) WithCachedTypeInfo>); impl_internable!(gc; ConstInterned); impl_stored_interned!(ConstInterned, Const, StoredConst); @@ -219,15 +219,14 @@ pub struct Valtree<'db> { impl<'db, V: super::WorldExposer> GenericTypeVisitable for Valtree<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned(self.interned); - self.inner().generic_visit_with(visitor); + if visitor.on_interned(self.interned).is_continue() { + self.inner().generic_visit_with(visitor); + } } } -#[derive(Debug, PartialEq, Eq, Hash)] -struct ValtreeInterned { - bytes: ConstBytes<'static>, -} +#[derive(Debug, PartialEq, Eq, Hash, GenericTypeVisitable)] +pub(super) struct ValtreeInterned(ConstBytes<'static>); impl_internable!(gc; ValtreeInterned); @@ -240,12 +239,12 @@ impl<'db> Valtree<'db> { #[inline] pub fn new(bytes: ConstBytes<'db>) -> Self { let bytes = unsafe { std::mem::transmute::, ConstBytes<'static>>(bytes) }; - Self { interned: Interned::new_gc(ValtreeInterned { bytes }) } + Self { interned: Interned::new_gc(ValtreeInterned(bytes)) } } #[inline] pub fn inner(&self) -> &ConstBytes<'db> { - let inner = &self.interned.bytes; + let inner = &self.interned.0; unsafe { std::mem::transmute::<&ConstBytes<'static>, &ConstBytes<'db>>(inner) } } } @@ -277,8 +276,9 @@ impl<'db> IntoKind for Const<'db> { impl<'db, V: super::WorldExposer> GenericTypeVisitable for Const<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned(self.interned); - self.kind().generic_visit_with(visitor); + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } } } diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs index 1a946cb90817..f2dacd16dbf9 100644 --- a/crates/hir-ty/src/next_solver/interner.rs +++ b/crates/hir-ty/src/next_solver/interner.rs @@ -1,8 +1,9 @@ //! Things related to the Interner in the next-trait-solver. -use std::fmt; +use std::{fmt, ops::ControlFlow}; use intern::{Interned, InternedRef, InternedSliceRef, impl_internable}; +use macros::GenericTypeVisitable; use rustc_ast_ir::{FloatTy, IntTy, UintTy}; pub use tls_cache::clear_tls_solver_cache; pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db}; @@ -21,8 +22,8 @@ use rustc_hash::FxHashSet; use rustc_index::bit_set::DenseBitSet; use rustc_type_ir::{ AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, CoroutineWitnessTypes, DebruijnIndex, - EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef, - TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance, + EarlyBinder, FlagComputation, Flags, GenericArgKind, GenericTypeVisitable, ImplPolarity, + InferTy, Interner, TraitRef, TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance, elaborate::elaborate, error::TypeError, fast_reject, @@ -169,8 +170,9 @@ macro_rules! interned_slice { { #[inline] fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned_slice(self.interned); - self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + if visitor.on_interned_slice(self.interned).is_continue() { + self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + } } } }; @@ -293,8 +295,14 @@ macro_rules! 
impl_stored_interned { pub(crate) use impl_stored_interned; pub trait WorldExposer { - fn on_interned(&mut self, interned: InternedRef<'_, T>); - fn on_interned_slice(&mut self, interned: InternedSliceRef<'_, T>); + fn on_interned( + &mut self, + interned: InternedRef<'_, T>, + ) -> ControlFlow<()>; + fn on_interned_slice( + &mut self, + interned: InternedSliceRef<'_, T>, + ) -> ControlFlow<()>; } #[derive(Debug, Copy, Clone)] @@ -803,7 +811,7 @@ pub struct Pattern<'db> { interned: InternedRef<'db, PatternInterned>, } -#[derive(PartialEq, Eq, Hash)] +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] struct PatternInterned(PatternKind<'static>); impl_internable!(gc; PatternInterned); @@ -884,8 +892,9 @@ impl<'db> rustc_type_ir::TypeVisitable> for Pattern<'db> { impl<'db, V: WorldExposer> rustc_type_ir::GenericTypeVisitable for Pattern<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned(self.interned); - self.kind().generic_visit_with(visitor); + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } } } @@ -2559,3 +2568,106 @@ mod tls_cache { GLOBAL_CACHE.with_borrow_mut(|handle| *handle = None); } } + +impl WorldExposer for intern::GarbageCollector { + fn on_interned( + &mut self, + interned: InternedRef<'_, T>, + ) -> ControlFlow<()> { + self.mark_interned_alive(interned) + } + + fn on_interned_slice( + &mut self, + interned: InternedSliceRef<'_, T>, + ) -> ControlFlow<()> { + self.mark_interned_slice_alive(interned) + } +} + +/// # Safety +/// +/// This cannot be called if there are some not-yet-recorded type values. Generally, if you have a mutable +/// reference to the database, and there are no other database - then you can call this safely, but you +/// also need to make sure to maintain the mutable reference while this is running. +pub unsafe fn collect_ty_garbage() { + let mut gc = intern::GarbageCollector::default(); + + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + + unsafe { gc.collect() }; +} + +macro_rules! impl_gc_visit { + ( $($ty:ty),* $(,)? ) => { + $( + impl ::intern::GcInternedVisit for $ty { + #[inline] + fn visit_with(&self, gc: &mut ::intern::GarbageCollector) { + self.generic_visit_with(gc); + } + } + )* + }; +} + +impl_gc_visit!( + super::consts::ConstInterned, + super::consts::ValtreeInterned, + PatternInterned, + super::opaques::ExternalConstraintsInterned, + super::predicate::PredicateInterned, + super::region::RegionInterned, + super::ty::TyInterned, + super::predicate::ClausesCachedTypeInfo, +); + +macro_rules! impl_gc_visit_slice { + ( $($ty:ty),* $(,)? 
) => { + $( + impl ::intern::GcInternedSliceVisit for $ty { + #[inline] + fn visit_header(header: &::Header, gc: &mut ::intern::GarbageCollector) { + header.generic_visit_with(gc); + } + + #[inline] + fn visit_slice(header: &[::SliceType], gc: &mut ::intern::GarbageCollector) { + header.generic_visit_with(gc); + } + } + )* + }; +} + +impl_gc_visit_slice!( + super::predicate::ClausesStorage, + super::generic_arg::GenericArgsStorage, + BoundVarKindsStorage, + VariancesOfStorage, + CanonicalVarsStorage, + PatListStorage, + super::opaques::PredefinedOpaquesStorage, + super::opaques::SolverDefIdsStorage, + super::predicate::BoundExistentialPredicatesStorage, + super::region::RegionAssumptionsStorage, + super::ty::TysStorage, +); diff --git a/crates/hir-ty/src/next_solver/opaques.rs b/crates/hir-ty/src/next_solver/opaques.rs index 7e1d6b7328cd..230469c21ab8 100644 --- a/crates/hir-ty/src/next_solver/opaques.rs +++ b/crates/hir-ty/src/next_solver/opaques.rs @@ -1,6 +1,7 @@ //! Things related to opaques in the next-trait-solver. use intern::{Interned, InternedRef, impl_internable}; +use macros::GenericTypeVisitable; use rustc_ast_ir::try_visit; use rustc_type_ir::inherent::SliceLike; @@ -30,8 +31,8 @@ pub struct ExternalConstraints<'db> { interned: InternedRef<'db, ExternalConstraintsInterned>, } -#[derive(PartialEq, Eq, Hash)] -struct ExternalConstraintsInterned(ExternalConstraintsData<'static>); +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +pub(super) struct ExternalConstraintsInterned(ExternalConstraintsData<'static>); impl_internable!(gc; ExternalConstraintsInterned); diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs index 95b48c73c12f..6d7539575fbc 100644 --- a/crates/hir-ty/src/next_solver/predicate.rs +++ b/crates/hir-ty/src/next_solver/predicate.rs @@ -186,8 +186,8 @@ pub struct Predicate<'db> { interned: InternedRef<'db, PredicateInterned>, } -#[derive(PartialEq, Eq, Hash)] -struct PredicateInterned(WithCachedTypeInfo>>); +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +pub(super) struct PredicateInterned(WithCachedTypeInfo>>); impl_internable!(gc; PredicateInterned); @@ -252,7 +252,10 @@ impl<'db> std::fmt::Debug for Predicate<'db> { } } -impl_slice_internable!(gc; ClausesStorage, WithCachedTypeInfo<()>, Clause<'static>); +#[derive(Clone, Copy, PartialEq, Eq, Hash, GenericTypeVisitable)] +pub struct ClausesCachedTypeInfo(WithCachedTypeInfo<()>); + +impl_slice_internable!(gc; ClausesStorage, ClausesCachedTypeInfo, Clause<'static>); impl_stored_interned_slice!(ClausesStorage, Clauses, StoredClauses); #[derive(Clone, Copy, PartialEq, Eq, Hash)] @@ -277,11 +280,11 @@ impl<'db> Clauses<'db> { pub fn new_from_slice(slice: &[Clause<'db>]) -> Self { let slice = unsafe { ::std::mem::transmute::<&[Clause<'db>], &[Clause<'static>]>(slice) }; let flags = FlagComputation::>::for_clauses(slice); - let flags = WithCachedTypeInfo { + let flags = ClausesCachedTypeInfo(WithCachedTypeInfo { internee: (), flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, - }; + }); Self { interned: InternedSlice::from_header_and_slice(flags, slice) } } @@ -400,20 +403,21 @@ impl<'db> rustc_type_ir::TypeVisitable> for Clauses<'db> { impl<'db, V: super::WorldExposer> rustc_type_ir::GenericTypeVisitable for Clauses<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned_slice(self.interned); - self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + if 
visitor.on_interned_slice(self.interned).is_continue() { + self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + } } } impl<'db> rustc_type_ir::Flags for Clauses<'db> { #[inline] fn flags(&self) -> rustc_type_ir::TypeFlags { - self.interned.header.header.flags + self.interned.header.header.0.flags } #[inline] fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { - self.interned.header.header.outer_exclusive_binder + self.interned.header.header.0.outer_exclusive_binder } } @@ -430,7 +434,9 @@ impl<'db> rustc_type_ir::TypeSuperVisitable> for Clauses<'db> { pub struct Clause<'db>(pub(crate) Predicate<'db>); // We could cram the reveal into the clauses like rustc does, probably -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)] +#[derive( + Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable, +)] pub struct ParamEnv<'db> { pub(crate) clauses: Clauses<'db>, } @@ -474,8 +480,9 @@ impl<'db> TypeVisitable> for Predicate<'db> { impl<'db, V: super::WorldExposer> GenericTypeVisitable for Predicate<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned(self.interned); - self.kind().generic_visit_with(visitor); + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } } } diff --git a/crates/hir-ty/src/next_solver/region.rs b/crates/hir-ty/src/next_solver/region.rs index dee7953ae36c..e34e87601fd7 100644 --- a/crates/hir-ty/src/next_solver/region.rs +++ b/crates/hir-ty/src/next_solver/region.rs @@ -2,6 +2,7 @@ use hir_def::LifetimeParamId; use intern::{Interned, InternedRef, Symbol, impl_internable}; +use macros::GenericTypeVisitable; use rustc_type_ir::{ BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, GenericTypeVisitable, INNERMOST, RegionVid, TypeFlags, TypeFoldable, TypeVisitable, @@ -25,7 +26,7 @@ pub struct Region<'db> { pub(super) interned: InternedRef<'db, RegionInterned>, } -#[derive(PartialEq, Eq, Hash)] +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] #[repr(align(4))] // Required for `GenericArg` bit-tagging. pub(super) struct RegionInterned(RegionKind<'static>); @@ -388,8 +389,9 @@ impl<'db> PlaceholderLike> for PlaceholderRegion { impl<'db, V: super::WorldExposer> GenericTypeVisitable for Region<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned(self.interned); - self.kind().generic_visit_with(visitor); + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } } } diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs index 305da50b6d9f..85534e42a823 100644 --- a/crates/hir-ty/src/next_solver/ty.rs +++ b/crates/hir-ty/src/next_solver/ty.rs @@ -8,6 +8,7 @@ use hir_def::{ }; use hir_def::{TraitId, type_ref::Rawness}; use intern::{Interned, InternedRef, impl_internable}; +use macros::GenericTypeVisitable; use rustc_abi::{Float, Integer, Size}; use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult}; use rustc_type_ir::{ @@ -51,7 +52,7 @@ pub struct Ty<'db> { pub(super) interned: InternedRef<'db, TyInterned>, } -#[derive(PartialEq, Eq, Hash)] +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] #[repr(align(4))] // Required for `GenericArg` bit-tagging. 
pub(super) struct TyInterned(WithCachedTypeInfo>); @@ -750,8 +751,9 @@ impl<'db> IntoKind for Ty<'db> { impl<'db, V: super::WorldExposer> GenericTypeVisitable for Ty<'db> { fn generic_visit_with(&self, visitor: &mut V) { - visitor.on_interned(self.interned); - self.kind().generic_visit_with(visitor); + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 3532220b72ae..933dd6af1d52 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -175,7 +175,7 @@ pub use { layout::LayoutError, mir::{MirEvalError, MirLowerError}, next_solver::abi::Safety, - next_solver::clear_tls_solver_cache, + next_solver::{clear_tls_solver_cache, collect_ty_garbage}, }, // FIXME: These are needed for import assets, properly encapsulate them. hir_ty::{method_resolution::TraitImpls, next_solver::SimplifiedType}, diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index 44621031ab8e..6414f091783c 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -19,6 +19,7 @@ hashbrown.workspace = true rustc-hash.workspace = true triomphe.workspace = true smallvec.workspace = true +rayon.workspace = true [lints] workspace = true diff --git a/crates/intern/src/gc.rs b/crates/intern/src/gc.rs new file mode 100644 index 000000000000..e559d1dc5710 --- /dev/null +++ b/crates/intern/src/gc.rs @@ -0,0 +1,325 @@ +//! Garbage collection of interned values. + +use std::{marker::PhantomData, ops::ControlFlow}; + +use hashbrown::raw::RawTable; +use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; +use rustc_hash::{FxBuildHasher, FxHashSet}; +use triomphe::{Arc, ThinArc}; + +use crate::{Internable, InternedRef, InternedSliceRef, SliceInternable}; + +trait Storage { + fn len(&self) -> usize; + + fn mark(&self, gc: &mut GarbageCollector); + + fn sweep(&self, gc: &GarbageCollector); +} + +struct InternedStorage(PhantomData T>); + +impl Storage for InternedStorage { + fn len(&self) -> usize { + T::storage().get().len() + } + + fn mark(&self, gc: &mut GarbageCollector) { + let storage = T::storage().get(); + for item in storage { + let item = item.key(); + let addr = Arc::as_ptr(item).addr(); + if Arc::strong_count(item) > 1 { + // The item is referenced from the outside. + gc.alive.insert(addr); + item.visit_with(gc); + } + } + } + + fn sweep(&self, gc: &GarbageCollector) { + let storage = T::storage().get(); + if cfg!(miri) { + storage.shards().iter().for_each(|shard| { + gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) + }); + } else { + storage.shards().par_iter().for_each(|shard| { + gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) + }); + } + } +} + +struct InternedSliceStorage(PhantomData T>); + +impl Storage for InternedSliceStorage { + fn len(&self) -> usize { + T::storage().get().len() + } + + fn mark(&self, gc: &mut GarbageCollector) { + let storage = T::storage().get(); + for item in storage { + let item = item.key(); + let addr = ThinArc::as_ptr(item).addr(); + if ThinArc::strong_count(item) > 1 { + // The item is referenced from the outside. 
+ gc.alive.insert(addr); + T::visit_header(&item.header.header, gc); + T::visit_slice(&item.slice, gc); + } + } + } + + fn sweep(&self, gc: &GarbageCollector) { + let storage = T::storage().get(); + if cfg!(miri) { + storage.shards().iter().for_each(|shard| { + gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) + }); + } else { + storage.shards().par_iter().for_each(|shard| { + gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) + }); + } + } +} + +pub trait GcInternedVisit { + fn visit_with(&self, gc: &mut GarbageCollector); +} + +pub trait GcInternedSliceVisit: SliceInternable { + fn visit_header(header: &Self::Header, gc: &mut GarbageCollector); + fn visit_slice(header: &[Self::SliceType], gc: &mut GarbageCollector); +} + +#[derive(Default)] +pub struct GarbageCollector { + alive: FxHashSet, + storages: Vec>, +} + +impl GarbageCollector { + pub fn add_storage(&mut self) { + const { assert!(T::USE_GC) }; + + self.storages.push(Box::new(InternedStorage::(PhantomData))); + } + + pub fn add_slice_storage(&mut self) { + const { assert!(T::USE_GC) }; + + self.storages.push(Box::new(InternedSliceStorage::(PhantomData))); + } + + /// # Safety + /// + /// This cannot be called if there are some not-yet-recorded type values. + pub unsafe fn collect(mut self) { + let total_nodes = self.storages.iter().map(|storage| storage.len()).sum(); + self.alive = FxHashSet::with_capacity_and_hasher(total_nodes, FxBuildHasher); + + let storages = std::mem::take(&mut self.storages); + + for storage in &storages { + storage.mark(&mut self); + } + + if cfg!(miri) { + storages.iter().for_each(|storage| storage.sweep(&self)); + } else { + storages.par_iter().for_each(|storage| storage.sweep(&self)); + } + } + + pub fn mark_interned_alive( + &mut self, + interned: InternedRef<'_, T>, + ) -> ControlFlow<()> { + if interned.strong_count() > 1 { + // It will be visited anyway, so short-circuit + return ControlFlow::Break(()); + } + let addr = interned.as_raw().addr(); + if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } + } + + pub fn mark_interned_slice_alive( + &mut self, + interned: InternedSliceRef<'_, T>, + ) -> ControlFlow<()> { + if interned.strong_count() > 1 { + // It will be visited anyway, so short-circuit + return ControlFlow::Break(()); + } + let addr = interned.as_raw().addr(); + if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } + } + + #[inline] + fn retain_only_alive(&self, map: &mut RawTable, mut get_addr: impl FnMut(&T) -> usize) { + unsafe { + // Here we only use `iter` as a temporary, preventing use-after-free + for bucket in map.iter() { + let item = bucket.as_mut(); + let addr = get_addr(item); + if !self.alive.contains(&addr) { + map.erase(bucket); + } + } + } + } +} + +#[cfg(test)] +mod tests { + use crate::{ + GarbageCollector, GcInternedSliceVisit, GcInternedVisit, Interned, InternedSliceRef, + }; + + crate::impl_internable!(String); + + #[test] + fn simple_interned() { + let a = Interned::new("abc".to_owned()); + let b = Interned::new("abc".to_owned()); + assert_eq!(a, b); + assert_eq!(a.as_ref(), b.as_ref()); + assert_eq!(a.as_ref(), a.as_ref()); + assert_eq!(a, a.clone()); + assert_eq!(a, a.clone().clone()); + assert_eq!(b.clone(), a.clone().clone()); + assert_eq!(*a, "abc"); + assert_eq!(*b, "abc"); + assert_eq!(b.as_ref().to_owned(), a); + let c = Interned::new("def".to_owned()); + assert_ne!(a, c); + assert_ne!(b, c); + assert_ne!(b.as_ref(), c.as_ref()); + 
assert_eq!(*c.as_ref(), "def"); + drop(c); + assert_eq!(*a, "abc"); + assert_eq!(*b, "abc"); + drop(a); + assert_eq!(*b, "abc"); + drop(b); + } + + #[test] + fn simple_gc() { + #[derive(Debug, PartialEq, Eq, Hash)] + struct GcString(String); + + crate::impl_internable!(gc; GcString); + + impl GcInternedVisit for GcString { + fn visit_with(&self, _gc: &mut GarbageCollector) {} + } + + crate::impl_slice_internable!(gc; StringSlice, String, String); + type InternedSlice = crate::InternedSlice; + + impl GcInternedSliceVisit for StringSlice { + fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {} + + fn visit_slice(_header: &[Self::SliceType], _gc: &mut GarbageCollector) {} + } + + let (a, d) = { + let a = Interned::new_gc(GcString("abc".to_owned())).to_owned(); + let b = Interned::new_gc(GcString("abc".to_owned())).to_owned(); + assert_eq!(a, b); + assert_eq!(a.as_ref(), b.as_ref()); + assert_eq!(a.as_ref(), a.as_ref()); + assert_eq!(a, a.clone()); + assert_eq!(a, a.clone().clone()); + assert_eq!(b.clone(), a.clone().clone()); + assert_eq!(a.0, "abc"); + assert_eq!(b.0, "abc"); + assert_eq!(b.as_ref().to_owned(), a); + let c = Interned::new_gc(GcString("def".to_owned())).to_owned(); + assert_ne!(a, c); + assert_ne!(b, c); + assert_ne!(b.as_ref(), c.as_ref()); + assert_eq!(c.as_ref().0, "def"); + + let d = InternedSlice::from_header_and_slice( + "abc".to_owned(), + &["def".to_owned(), "123".to_owned()], + ); + let e = InternedSlice::from_header_and_slice( + "abc".to_owned(), + &["def".to_owned(), "123".to_owned()], + ); + assert_eq!(d, e); + assert_eq!(d.to_owned(), e.to_owned()); + assert_eq!(d.header.length, 2); + assert_eq!(d.header.header, "abc"); + assert_eq!(d.slice, ["def", "123"]); + (a, d.to_owned()) + }; + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + + assert_eq!(a.0, "abc"); + assert_eq!(d.header.length, 2); + assert_eq!(d.header.header, "abc"); + assert_eq!(d.slice, ["def", "123"]); + + drop(a); + drop(d); + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + } + + #[test] + fn gc_visit() { + #[derive(PartialEq, Eq, Hash)] + struct GcInterned(InternedSliceRef<'static, StringSlice>); + + crate::impl_internable!(gc; GcInterned); + + impl GcInternedVisit for GcInterned { + fn visit_with(&self, gc: &mut GarbageCollector) { + _ = gc.mark_interned_slice_alive(self.0); + } + } + + crate::impl_slice_internable!(gc; StringSlice, String, i32); + type InternedSlice = crate::InternedSlice; + + impl GcInternedSliceVisit for StringSlice { + fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {} + + fn visit_slice(_header: &[Self::SliceType], _gc: &mut GarbageCollector) {} + } + + let outer = { + let inner = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456, 789]); + Interned::new_gc(GcInterned(inner)).to_owned() + }; + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + + assert_eq!(outer.0.header.header, "abc"); + assert_eq!(outer.0.slice, [123, 456, 789]); + + drop(outer); + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + } +} diff --git a/crates/intern/src/intern.rs b/crates/intern/src/intern.rs index fdefb936560e..5d4d00118512 100644 --- a/crates/intern/src/intern.rs +++ b/crates/intern/src/intern.rs @@ -26,6 +26,8 @@ unsafe impl Sync for Interned {} 
impl Interned { #[inline] pub fn new(obj: T) -> Self { + const { assert!(!T::USE_GC) }; + let storage = T::storage().get(); let (mut shard, hash) = Self::select(storage, &obj); // Atomically, @@ -228,6 +230,11 @@ impl<'a, T: Internable> InternedRef<'a, T> { pub unsafe fn decrement_refcount(self) { unsafe { drop(Arc::from_raw(self.as_raw())) } } + + #[inline] + pub(crate) fn strong_count(self) -> usize { + ArcBorrow::strong_count(&self.arc) + } } impl Clone for InternedRef<'_, T> { @@ -292,12 +299,12 @@ impl InternStorage { } impl InternStorage { - fn get(&self) -> &InternMap { + pub(crate) fn get(&self) -> &InternMap { self.map.get_or_init(DashMap::default) } } -pub trait Internable: Hash + Eq + 'static { +pub trait Internable: Hash + Eq + Send + Sync + 'static { const USE_GC: bool; fn storage() -> &'static InternStorage; diff --git a/crates/intern/src/intern_slice.rs b/crates/intern/src/intern_slice.rs index 989a21e70cf0..7f2159ee664e 100644 --- a/crates/intern/src/intern_slice.rs +++ b/crates/intern/src/intern_slice.rs @@ -256,6 +256,16 @@ impl<'a, T: SliceInternable> InternedSliceRef<'a, T> { pub unsafe fn decrement_refcount(self) { drop(ManuallyDrop::into_inner(self.arc())); } + + #[inline] + pub(crate) fn strong_count(self) -> usize { + ThinArc::strong_count(&self.arc()) + } + + #[inline] + pub(crate) fn as_raw(self) -> *const c_void { + self.arc().as_ptr() + } } impl Clone for InternedSliceRef<'_, T> { @@ -318,15 +328,15 @@ impl InternSliceStorage { } impl InternSliceStorage { - fn get(&self) -> &InternMap { + pub(crate) fn get(&self) -> &InternMap { self.map.get_or_init(DashMap::default) } } pub trait SliceInternable: Sized + 'static { const USE_GC: bool; - type Header: Eq + Hash; - type SliceType: Eq + Hash + 'static; + type Header: Eq + Hash + Send + Sync; + type SliceType: Eq + Hash + Send + Sync + 'static; fn storage() -> &'static InternSliceStorage; } @@ -335,6 +345,7 @@ pub trait SliceInternable: Sized + 'static { #[doc(hidden)] macro_rules! _impl_slice_internable { ( gc; $tag:ident, $h:ty, $t:ty $(,)? ) => { + #[allow(unreachable_pub)] pub struct $tag; impl $crate::SliceInternable for $tag { const USE_GC: bool = true; diff --git a/crates/intern/src/lib.rs b/crates/intern/src/lib.rs index c5b42a860ac4..0c0b12427d21 100644 --- a/crates/intern/src/lib.rs +++ b/crates/intern/src/lib.rs @@ -2,10 +2,12 @@ //! //! Eventually this should probably be replaced with salsa-based interning. +mod gc; mod intern; mod intern_slice; mod symbol; +pub use self::gc::{GarbageCollector, GcInternedSliceVisit, GcInternedVisit}; pub use self::intern::{InternStorage, Internable, Interned, InternedRef, impl_internable}; pub use self::intern_slice::{ InternSliceStorage, InternedSlice, InternedSliceRef, SliceInternable, impl_slice_internable, diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index f863a7ee8d04..76256b0a2253 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -355,6 +355,7 @@ impl flags::AnalysisStats { } hir::clear_tls_solver_cache(); + unsafe { hir::collect_ty_garbage() }; let db = host.raw_database_mut(); db.trigger_lru_eviction(); diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 007725be7406..2371f7a65649 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -98,6 +98,13 @@ config_data! { /// Code's `files.watcherExclude`. 
files_exclude | files_excludeDirs: Vec = vec![], + /// This config controls the frequency in which rust-analyzer will perform its internal Garbage + /// Collection. It is specified in revisions, roughly equivalent to number of changes. The default + /// is 1000. + /// + /// Setting a smaller value may help limit peak memory usage at the expense of speed. + gc_frequency: usize = 1000, + /// If this is `true`, when "Goto Implementations" and in "Implementations" lens, are triggered on a `struct` or `enum` or `union`, we filter out trait implementations that originate from `derive`s above the type. gotoImplementations_filterAdjacentDerives: bool = false, @@ -1701,9 +1708,11 @@ impl Config { pub fn caps(&self) -> &ClientCapabilities { &self.caps } -} -impl Config { + pub fn gc_freq(&self) -> usize { + *self.gc_frequency() + } + pub fn assist(&self, source_root: Option) -> AssistConfig { AssistConfig { snippet_cap: self.snippet_cap(), diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 7828f5084433..41783584a9ba 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -193,6 +193,8 @@ pub(crate) struct GlobalState { /// which will usually end up causing a bunch of incorrect diagnostics on startup. pub(crate) incomplete_crate_graph: bool, + pub(crate) revisions_until_next_gc: usize, + pub(crate) minicore: MiniCoreRustAnalyzerInternalOnly, } @@ -319,6 +321,8 @@ impl GlobalState { incomplete_crate_graph: false, minicore: MiniCoreRustAnalyzerInternalOnly::default(), + + revisions_until_next_gc: config.gc_freq(), }; // Apply any required database inputs from the config. this.update_configuration(config); @@ -435,6 +439,15 @@ impl GlobalState { }); self.analysis_host.apply_change(change); + + if self.revisions_until_next_gc == 0 { + // SAFETY: Just changed some database inputs, all queries were canceled. + unsafe { hir::collect_ty_garbage() }; + self.revisions_until_next_gc = self.config.gc_freq(); + } else { + self.revisions_until_next_gc -= 1; + } + if !modified_ratoml_files.is_empty() || !self.config.same_source_root_parent_map(&self.local_roots_parent_map) { diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md index 1f5c672233ae..6b7ef049645c 100644 --- a/docs/book/src/configuration_generated.md +++ b/docs/book/src/configuration_generated.md @@ -635,6 +635,17 @@ Default: `"client"` Controls file watching implementation. +## rust-analyzer.gc.frequency {#gc.frequency} + +Default: `1000` + +This config controls the frequency in which rust-analyzer will perform its internal Garbage +Collection. It is specified in revisions, roughly equivalent to number of changes. The default +is 1000. + +Setting a smaller value may help limit peak memory usage at the expense of speed. + + ## rust-analyzer.gotoImplementations.filterAdjacentDerives {#gotoImplementations.filterAdjacentDerives} Default: `false` diff --git a/editors/code/package.json b/editors/code/package.json index 98fe6a558b80..97db1322dd71 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1627,6 +1627,17 @@ } } }, + { + "title": "Gc", + "properties": { + "rust-analyzer.gc.frequency": { + "markdownDescription": "This config controls the frequency in which rust-analyzer will perform its internal Garbage\nCollection. It is specified in revisions, roughly equivalent to number of changes. 
The default\nis 1000.\n\nSetting a smaller value may help limit peak memory usage at the expense of speed.", + "default": 1000, + "type": "integer", + "minimum": 0 + } + } + }, { "title": "Goto Implementations", "properties": { From 5e2dfbbd020b1d57e3db6022a146da0e9354aa3c Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 18 Dec 2025 17:28:03 +0200 Subject: [PATCH 3/3] Add extensive docs for the unsafe operations --- crates/hir-ty/src/next_solver.rs | 4 + crates/hir-ty/src/next_solver/generic_arg.rs | 19 ++- crates/hir-ty/src/next_solver/interner.rs | 8 ++ crates/intern/src/gc.rs | 67 ++++++----- crates/intern/src/intern.rs | 46 +++++++- crates/intern/src/intern_slice.rs | 117 +++++++------------ 6 files changed, 146 insertions(+), 115 deletions(-) diff --git a/crates/hir-ty/src/next_solver.rs b/crates/hir-ty/src/next_solver.rs index 539d09a8f5df..e91864bd8759 100644 --- a/crates/hir-ty/src/next_solver.rs +++ b/crates/hir-ty/src/next_solver.rs @@ -1,5 +1,9 @@ //! Things relevant to the next trait solver. +// Note: in interned types defined in this module, we generally treat the lifetime as advisory +// and transmute it as needed. This is because no real memory unsafety can be caused from an +// incorrect lifetime here. + pub mod abi; mod binder; mod consts; diff --git a/crates/hir-ty/src/next_solver/generic_arg.rs b/crates/hir-ty/src/next_solver/generic_arg.rs index f31b487eaed5..b600f6000d9e 100644 --- a/crates/hir-ty/src/next_solver/generic_arg.rs +++ b/crates/hir-ty/src/next_solver/generic_arg.rs @@ -1,4 +1,10 @@ -//! Things related to generic args in the next-trait-solver. +//! Things related to generic args in the next-trait-solver (`GenericArg`, `GenericArgs`, `Term`). +//! +//! Implementations of `GenericArg` and `Term` are pointer-tagged instead of an enum (rustc does +//! the same). This is done to save memory (which also helps speed) - one `GenericArg` is a machine +//! word instead of two, while matching on it is basically as cheap. The implementation for both +//! `GenericArg` and `Term` is shared in [`GenericArgImpl`]. This both simplifies the implementation, +//! as well as enables a noop conversion from `Term` to `GenericArg`. use std::{hint::unreachable_unchecked, marker::PhantomData, ptr::NonNull}; @@ -29,10 +35,14 @@ pub type TermKind<'db> = rustc_type_ir::TermKind>; #[derive(Clone, Copy, PartialEq, Eq, Hash)] struct GenericArgImpl<'db> { + /// # Invariant + /// + /// Contains an [`InternedRef`] of a [`Ty`], [`Const`] or [`Region`], bit-tagged as per the consts below. ptr: NonNull<()>, _marker: PhantomData<(Ty<'db>, Const<'db>, Region<'db>)>, } +// SAFETY: We essentially own the `Ty`, `Const` or `Region`, and they are `Send + Sync`. unsafe impl Send for GenericArgImpl<'_> {} unsafe impl Sync for GenericArgImpl<'_> {} @@ -46,6 +56,7 @@ impl<'db> GenericArgImpl<'db> { #[inline] fn new_ty(ty: Ty<'db>) -> Self { Self { + // SAFETY: We create it from an `InternedRef`, and it's never null. ptr: unsafe { NonNull::new_unchecked( ty.interned @@ -62,6 +73,7 @@ impl<'db> GenericArgImpl<'db> { #[inline] fn new_const(ty: Const<'db>) -> Self { Self { + // SAFETY: We create it from an `InternedRef`, and it's never null. ptr: unsafe { NonNull::new_unchecked( ty.interned @@ -78,6 +90,7 @@ impl<'db> GenericArgImpl<'db> { #[inline] fn new_region(ty: Region<'db>) -> Self { Self { + // SAFETY: We create it from an `InternedRef`, and it's never null. 
ptr: unsafe { NonNull::new_unchecked( ty.interned @@ -94,6 +107,7 @@ impl<'db> GenericArgImpl<'db> { #[inline] fn kind(self) -> GenericArgKind<'db> { let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK); + // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match. unsafe { match self.ptr.addr().get() & Self::KIND_MASK { Self::TY_TAG => GenericArgKind::Type(Ty { @@ -113,6 +127,9 @@ impl<'db> GenericArgImpl<'db> { #[inline] fn term_kind(self) -> TermKind<'db> { let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK); + // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match. + // It is the caller's responsibility (encapsulated within this module) to only call this with + // `Term`, which cannot be constructed from a `Region`. unsafe { match self.ptr.addr().get() & Self::KIND_MASK { Self::TY_TAG => { diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs index f2dacd16dbf9..6b97d110ee3e 100644 --- a/crates/hir-ty/src/next_solver/interner.rs +++ b/crates/hir-ty/src/next_solver/interner.rs @@ -230,8 +230,10 @@ macro_rules! impl_stored_interned_slice { } } + // SAFETY: It is safe to store this type in queries (but not `$name`). unsafe impl salsa::Update for $stored_name { unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + // SAFETY: Comparing by (pointer) equality is safe. unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) } } } @@ -294,6 +296,8 @@ macro_rules! impl_stored_interned { } pub(crate) use impl_stored_interned; +/// This is a visitor trait that treats any interned thing specifically. Visitables are expected to call +/// the trait's methods when encountering an interned. This is used to implement marking in GC. pub trait WorldExposer { fn on_interned( &mut self, @@ -2613,6 +2617,10 @@ pub unsafe fn collect_ty_garbage() { gc.add_slice_storage::(); gc.add_slice_storage::(); + // SAFETY: + // - By our precondition, there are no unrecorded types. + // - We implement `GcInternedVisit` and `GcInternedSliceVisit` correctly for all types. + // - We added all storages (FIXME: it's too easy to forget to add a new storage here). unsafe { gc.collect() }; } diff --git a/crates/intern/src/gc.rs b/crates/intern/src/gc.rs index e559d1dc5710..0d500a9714e4 100644 --- a/crates/intern/src/gc.rs +++ b/crates/intern/src/gc.rs @@ -1,7 +1,12 @@ //! Garbage collection of interned values. +//! +//! The GC is a simple mark-and-sweep GC: you first mark all storages, then the +//! GC visits them, and each live value they refer, recursively, then removes +//! those not marked. The sweep phase is done in parallel. 
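A type opts into marking by reporting which other interned values it keeps alive. The sketch below is modeled on this crate's own tests in `gc.rs`; the `Ids` and `Node` names are illustrative placeholders, not types added by this patch.

use intern::{GarbageCollector, GcInternedSliceVisit, GcInternedVisit, InternedSliceRef};

// Interned `i32` slices with a `String` header; `Ids` is the generated storage tag.
intern::impl_slice_internable!(gc; Ids, String, i32);

impl GcInternedSliceVisit for Ids {
    // Neither the header nor the elements hold further interned values.
    fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {}
    fn visit_slice(_slice: &[Self::SliceType], _gc: &mut GarbageCollector) {}
}

// A value that holds another interned value must mark it, otherwise the sweep
// could free the slice while this value is still reachable.
#[derive(PartialEq, Eq, Hash)]
struct Node(InternedSliceRef<'static, Ids>);

intern::impl_internable!(gc; Node);

impl GcInternedVisit for Node {
    fn visit_with(&self, gc: &mut GarbageCollector) {
        _ = gc.mark_interned_slice_alive(self.0);
    }
}
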
-use std::{marker::PhantomData, ops::ControlFlow}; +use std::{hash::Hash, marker::PhantomData, ops::ControlFlow}; +use dashmap::DashMap; use hashbrown::raw::RawTable; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; use rustc_hash::{FxBuildHasher, FxHashSet}; @@ -39,15 +44,7 @@ impl Storage for InternedStorage { fn sweep(&self, gc: &GarbageCollector) { let storage = T::storage().get(); - if cfg!(miri) { - storage.shards().iter().for_each(|shard| { - gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) - }); - } else { - storage.shards().par_iter().for_each(|shard| { - gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) - }); - } + gc.sweep_storage(storage, |item| item.as_ptr().addr()); } } @@ -74,15 +71,7 @@ impl Storage for InternedSliceStorage fn sweep(&self, gc: &GarbageCollector) { let storage = T::storage().get(); - if cfg!(miri) { - storage.shards().iter().for_each(|shard| { - gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) - }); - } else { - storage.shards().par_iter().for_each(|shard| { - gc.retain_only_alive(&mut *shard.write(), |item| item.0.as_ptr().addr()) - }); - } + gc.sweep_storage(storage, |item| item.as_ptr().addr()); } } @@ -116,7 +105,10 @@ impl GarbageCollector { /// # Safety /// - /// This cannot be called if there are some not-yet-recorded type values. + /// - This cannot be called if there are some not-yet-recorded type values. + /// - All relevant storages must have been added; that is, within the full graph of values, + /// the added storages must form a DAG. + /// - [`GcInternedVisit`] and [`GcInternedSliceVisit`] must mark all values reachable from the node. pub unsafe fn collect(mut self) { let total_nodes = self.storages.iter().map(|storage| storage.len()).sum(); self.alive = FxHashSet::with_capacity_and_hasher(total_nodes, FxBuildHasher); @@ -127,6 +119,7 @@ impl GarbageCollector { storage.mark(&mut self); } + // Miri doesn't support rayon. if cfg!(miri) { storages.iter().for_each(|storage| storage.sweep(&self)); } else { @@ -158,8 +151,26 @@ impl GarbageCollector { if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } } + fn sweep_storage( + &self, + storage: &DashMap, + get_addr: impl Fn(&T) -> usize + Send + Sync, + ) { + // Miri doesn't support rayon. + if cfg!(miri) { + storage.shards().iter().for_each(|shard| { + self.retain_only_alive(&mut *shard.write(), |item| get_addr(&item.0)) + }); + } else { + storage.shards().par_iter().for_each(|shard| { + self.retain_only_alive(&mut *shard.write(), |item| get_addr(&item.0)) + }); + } + } + #[inline] fn retain_only_alive(&self, map: &mut RawTable, mut get_addr: impl FnMut(&T) -> usize) { + // This code was copied from DashMap's retain() - which we can't use because we want to run in parallel. 
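+        // The caller (`sweep_storage`) already holds this shard's write lock, and
+        // shards are disjoint, so mutating the table in place here is sound even
+        // while other shards are being swept in parallel.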
unsafe { // Here we only use `iter` as a temporary, preventing use-after-free for bucket in map.iter() { @@ -218,7 +229,7 @@ mod tests { fn visit_with(&self, _gc: &mut GarbageCollector) {} } - crate::impl_slice_internable!(gc; StringSlice, String, String); + crate::impl_slice_internable!(gc; StringSlice, String, u32); type InternedSlice = crate::InternedSlice; impl GcInternedSliceVisit for StringSlice { @@ -245,19 +256,13 @@ mod tests { assert_ne!(b.as_ref(), c.as_ref()); assert_eq!(c.as_ref().0, "def"); - let d = InternedSlice::from_header_and_slice( - "abc".to_owned(), - &["def".to_owned(), "123".to_owned()], - ); - let e = InternedSlice::from_header_and_slice( - "abc".to_owned(), - &["def".to_owned(), "123".to_owned()], - ); + let d = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456]); + let e = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456]); assert_eq!(d, e); assert_eq!(d.to_owned(), e.to_owned()); assert_eq!(d.header.length, 2); assert_eq!(d.header.header, "abc"); - assert_eq!(d.slice, ["def", "123"]); + assert_eq!(d.slice, [123, 456]); (a, d.to_owned()) }; @@ -269,7 +274,7 @@ mod tests { assert_eq!(a.0, "abc"); assert_eq!(d.header.length, 2); assert_eq!(d.header.header, "abc"); - assert_eq!(d.slice, ["def", "123"]); + assert_eq!(d.slice, [123, 456]); drop(a); drop(d); diff --git a/crates/intern/src/intern.rs b/crates/intern/src/intern.rs index 5d4d00118512..b7acd6624b99 100644 --- a/crates/intern/src/intern.rs +++ b/crates/intern/src/intern.rs @@ -1,8 +1,31 @@ //! Interning of single values. +//! +//! Interning supports two modes: GC and non-GC. +//! +//! In non-GC mode, you create [`Interned`]s, and can create `Copy` handles to them +//! that can still be upgraded back to [`Interned`] ([`InternedRef`]) via [`Interned::as_ref`]. +//! Generally, letting the [`InternedRef`] to outlive the [`Interned`] is a soundness bug and can +//! lead to UB. When all [`Interned`]s of some value are dropped, the value is freed (newer interns +//! may re-create it, not necessarily in the same place). +//! +//! In GC mode, you generally operate on [`InternedRef`]s. They are `Copy` and comfortable. To intern +//! a value you call [`Interned::new_gc`], which returns an [`InternedRef`]. Having all [`Interned`]s +//! of some value be dropped will *not* immediately free the value. Instead, a mark-and-sweep GC can +//! be initiated, which will free all values which have no live [`Interned`]s. +//! +//! Generally, in GC mode, you operate on [`InternedRef`], but when you need to store some long-term +//! value (e.g. a Salsa query output), you convert it to an [`Interned`]. This ensures that an eventual +//! GC will not free it as long as it is alive. +//! +//! Making mistakes is hard due to GC [`InternedRef`] wrappers not implementing `salsa::Update`, meaning +//! Salsa will ensure you do not store them in queries or Salsa-interneds. However it's still *possible* +//! without unsafe code (for example, by storing them in a `static`), which is why triggering GC is unsafe. +//! +//! For more information about GC see [`crate::gc`]. 
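A compact sketch of the two modes just described. `PlainData` and `GcData` are hypothetical types, and the `impl_internable!` invocations only mirror the `impl_slice_internable!(gc; ...)` form used in the gc tests, so the exact macro arguments may differ:

    impl_internable!(PlainData);      // non-GC mode (assumed macro form)
    impl_internable!(gc; GcData);     // GC mode (assumed macro form)

    // Non-GC: the value lives while some `Interned` does; the `Copy` handle from
    // `as_ref` must not outlive it.
    let owned = Interned::new(PlainData::default());
    let borrowed = owned.as_ref();

    // GC: intern straight to a `Copy` handle; nothing is freed until an (unsafe)
    // collection finds the value unreachable. Before storing a value long-term
    // (e.g. in a Salsa query output), convert the handle back to an `Interned`
    // as described above, so a later GC keeps it alive.
    let handle = Interned::new_gc(GcData::default());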
use std::{ fmt::{self, Debug, Display}, - hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, + hash::{BuildHasher, Hash, Hasher}, ops::Deref, ptr, sync::OnceLock, @@ -10,19 +33,16 @@ use std::{ use dashmap::{DashMap, SharedValue}; use hashbrown::raw::RawTable; -use rustc_hash::FxHasher; +use rustc_hash::FxBuildHasher; use triomphe::{Arc, ArcBorrow}; -type InternMap = DashMap, (), BuildHasherDefault>; +type InternMap = DashMap, (), FxBuildHasher>; type Guard = dashmap::RwLockWriteGuard<'static, RawTable<(Arc, SharedValue<()>)>>; pub struct Interned { arc: Arc, } -unsafe impl Send for Interned {} -unsafe impl Sync for Interned {} - impl Interned { #[inline] pub fn new(obj: T) -> Self { @@ -96,6 +116,7 @@ impl Interned { /// The pointer should originate from an `Interned` or an `InternedRef`. #[inline] pub unsafe fn from_raw(ptr: *const T) -> Self { + // SAFETY: Our precondition. Self { arc: unsafe { Arc::from_raw(ptr) } } } @@ -209,6 +230,7 @@ impl<'a, T: Internable> InternedRef<'a, T> { /// The pointer needs to originate from `Interned` or `InternedRef`. #[inline] pub unsafe fn from_raw(ptr: *const T) -> Self { + // SAFETY: Our precondition. Self { arc: unsafe { ArcBorrow::from_ptr(ptr) } } } @@ -228,6 +250,7 @@ impl<'a, T: Internable> InternedRef<'a, T> { /// map also keeps a reference to the value. #[inline] pub unsafe fn decrement_refcount(self) { + // SAFETY: Our precondition. unsafe { drop(Arc::from_raw(self.as_raw())) } } @@ -235,6 +258,17 @@ impl<'a, T: Internable> InternedRef<'a, T> { pub(crate) fn strong_count(self) -> usize { ArcBorrow::strong_count(&self.arc) } + + /// **Available only on GC mode**. + /// + /// Changes the attached lifetime, as in GC mode, the lifetime is more kind of a lint to prevent misuse + /// than actual soundness check. + #[inline] + pub fn change_lifetime<'b>(self) -> InternedRef<'b, T> { + const { assert!(T::USE_GC) }; + // SAFETY: The lifetime on `InternedRef` is essentially advisory only for GCed types. + unsafe { std::mem::transmute::, InternedRef<'b, T>>(self) } + } } impl Clone for InternedRef<'_, T> { diff --git a/crates/intern/src/intern_slice.rs b/crates/intern/src/intern_slice.rs index 7f2159ee664e..58de6e17bdff 100644 --- a/crates/intern/src/intern_slice.rs +++ b/crates/intern/src/intern_slice.rs @@ -1,10 +1,16 @@ //! Interning of slices, potentially with a header. +//! +//! See [`crate::intern`] for an explanation of interning modes. Note that slice interning is currently +//! available only in GC mode (there is no other need). +//! +//! [`InternedSlice`] and [`InternedSliceRef`] are essentially [`Interned<(Header, Box<[SliceType]>)>`][crate::Interned] +//! and [`InternedRef`][crate::InternedRef] with the same types, but more optimized. There is only one +//! allocation and the pointer is thin. 
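For a concrete picture, this mirrors the test in `gc.rs` earlier in this patch: a `String` header plus a `[u32]` slice, interned behind a single thin pointer:

    crate::impl_slice_internable!(gc; StringSlice, String, u32);

    let r = InternedSlice::<StringSlice>::from_header_and_slice("abc".to_owned(), &[123, 456]);
    assert_eq!(r.header.header, "abc"); // the user-supplied header
    assert_eq!(r.header.length, 2);     // slice length, stored inline by `HeaderWithLength`
    assert_eq!(r.slice, [123, 456]);    // the slice payload, in the same allocation

As in the tests, the type also needs a `GcInternedSliceVisit` impl so the collector can trace through it when a GC runs.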
use std::{ - borrow::Borrow, ffi::c_void, fmt::{self, Debug}, - hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, + hash::{BuildHasher, Hash, Hasher}, marker::PhantomData, mem::ManuallyDrop, ops::Deref, @@ -14,13 +20,13 @@ use std::{ use dashmap::{DashMap, SharedValue}; use hashbrown::raw::RawTable; -use rustc_hash::FxHasher; +use rustc_hash::FxBuildHasher; use triomphe::{HeaderSlice, HeaderWithLength, ThinArc}; type InternMap = DashMap< ThinArc<::Header, ::SliceType>, (), - BuildHasherDefault, + FxBuildHasher, >; type Guard = dashmap::RwLockWriteGuard< 'static, @@ -40,14 +46,14 @@ pub struct InternedSlice { impl InternedSlice { #[inline] - fn new<'a>( + pub fn from_header_and_slice<'a>( header: T::Header, - slice: impl Borrow<[T::SliceType]> - + IntoIterator, + slice: &[T::SliceType], ) -> InternedSliceRef<'a, T> { const { assert!(T::USE_GC) }; + let storage = T::storage().get(); - let (mut shard, hash) = Self::select(storage, &header, slice.borrow()); + let (mut shard, hash) = Self::select(storage, &header, slice); // Atomically, // - check if `obj` is already in the map // - if so, clone its `Arc` and return it @@ -56,7 +62,7 @@ impl InternedSlice { // insert the same object between us looking it up and inserting it. let bucket = match shard.find_or_find_insert_slot( hash, - |(other, _)| other.header.header == header && other.slice == *slice.borrow(), + |(other, _)| other.header.header == header && other.slice == *slice, |(x, _)| storage.hasher().hash_one(x), ) { Ok(bucket) => bucket, @@ -65,51 +71,21 @@ impl InternedSlice { shard.insert_in_slot( hash, insert_slot, - ( - ThinArc::from_header_and_iter(header, slice.into_iter()), - SharedValue::new(()), - ), + (ThinArc::from_header_and_slice(header, slice), SharedValue::new(())), ) }, }; // SAFETY: We just retrieved/inserted this bucket. + // `NonNull::new_unchecked()` is safe because the pointer originates from a `ThinArc`. unsafe { InternedSliceRef { + // INVARIANT: We create it from a `ThinArc`. ptr: NonNull::new_unchecked(ThinArc::as_ptr(&bucket.as_ref().0).cast_mut()), _marker: PhantomData, } } } - #[inline] - pub fn from_header_and_slice<'a>( - header: T::Header, - slice: &[T::SliceType], - ) -> InternedSliceRef<'a, T> - where - T::SliceType: Clone, - { - return Self::new(header, Iter(slice)); - - struct Iter<'a, T>(&'a [T]); - - impl<'a, T: Clone> IntoIterator for Iter<'a, T> { - type IntoIter = std::iter::Cloned>; - type Item = T; - #[inline] - fn into_iter(self) -> Self::IntoIter { - self.0.iter().cloned() - } - } - - impl Borrow<[T]> for Iter<'_, T> { - #[inline] - fn borrow(&self) -> &[T] { - self.0 - } - } - } - #[inline] fn select( storage: &'static InternMap, @@ -132,51 +108,20 @@ impl InternedSlice { #[inline(always)] fn ptr(&self) -> *const c_void { - unsafe { ptr::from_ref(&self.arc).read().into_raw() } + self.arc.as_ptr() } #[inline] pub fn as_ref(&self) -> InternedSliceRef<'_, T> { InternedSliceRef { + // SAFETY: `self.ptr` comes from a valid `ThinArc`, so non null. + // INVARIANT: We create it from a `ThinArc`. ptr: unsafe { NonNull::new_unchecked(self.ptr().cast_mut()) }, _marker: PhantomData, } } } -impl Drop for InternedSlice { - #[inline] - fn drop(&mut self) { - // When the last `Ref` is dropped, remove the object from the global map. - if !T::USE_GC && ThinArc::strong_count(&self.arc) == 2 { - // Only `self` and the global map point to the object. 
- - self.drop_slow(); - } - } -} - -impl InternedSlice { - #[cold] - fn drop_slow(&mut self) { - let storage = T::storage().get(); - let (mut shard, hash) = Self::select(storage, &self.arc.header.header, &self.arc.slice); - - if ThinArc::strong_count(&self.arc) != 2 { - // Another thread has interned another copy - return; - } - - shard.remove_entry(hash, |(other, _)| **other == *self.arc); - - // Shrink the backing storage if the shard is less than 50% occupied. - if shard.len() * 2 < shard.capacity() { - let len = shard.len(); - shard.shrink_to(len, |(x, _)| storage.hasher().hash_one(x)); - } - } -} - /// Compares interned `Ref`s using pointer equality. impl PartialEq for InternedSlice { // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. @@ -225,16 +170,22 @@ where #[repr(transparent)] pub struct InternedSliceRef<'a, T> { + /// # Invariant + /// + /// There is no `ThinArcBorrow` unfortunately, so this is basically a `ManuallyDrop`, + /// except that can't be `Copy`, so we store a raw pointer instead. ptr: NonNull, _marker: PhantomData<&'a T>, } +// SAFETY: This is essentially a `ThinArc`, implemented as a raw pointer because there is no `ThinArcBorrowed`. unsafe impl Send for InternedSliceRef<'_, T> {} unsafe impl Sync for InternedSliceRef<'_, T> {} impl<'a, T: SliceInternable> InternedSliceRef<'a, T> { #[inline(always)] fn arc(self) -> ManuallyDrop> { + // SAFETY: `self.ptr`'s invariant. unsafe { ManuallyDrop::new(ThinArc::from_raw(self.ptr.as_ptr())) } } @@ -245,6 +196,7 @@ impl<'a, T: SliceInternable> InternedSliceRef<'a, T> { #[inline] pub fn get(self) -> &'a Pointee { + // SAFETY: This is a lifetime extension, valid because we live for `'a`. unsafe { &*ptr::from_ref::>(&*self.arc()) } } @@ -266,6 +218,17 @@ impl<'a, T: SliceInternable> InternedSliceRef<'a, T> { pub(crate) fn as_raw(self) -> *const c_void { self.arc().as_ptr() } + + /// **Available only on GC mode**. + /// + /// Changes the attached lifetime, as in GC mode, the lifetime is more kind of a lint to prevent misuse + /// than actual soundness check. + #[inline] + pub fn change_lifetime<'b>(self) -> InternedSliceRef<'b, T> { + const { assert!(T::USE_GC) }; + // SAFETY: The lifetime on `InternedSliceRef` is essentially advisory only for GCed types. + unsafe { std::mem::transmute::, InternedSliceRef<'b, T>>(self) } + } } impl Clone for InternedSliceRef<'_, T> { @@ -336,7 +299,7 @@ impl InternSliceStorage { pub trait SliceInternable: Sized + 'static { const USE_GC: bool; type Header: Eq + Hash + Send + Sync; - type SliceType: Eq + Hash + Send + Sync + 'static; + type SliceType: Eq + Hash + Send + Sync + Copy + 'static; fn storage() -> &'static InternSliceStorage; }
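Returning to the `GenericArgImpl` change at the top of this patch, the pointer-tagging scheme its module doc describes can be shown in isolation. The following is a self-contained toy, not the crate's code: its types, tag values and alignment are invented, and it uses a safe `unreachable!` where the real code uses `unreachable_unchecked`. The mechanics match the real `new_ty`/`kind` pair: alignment frees the low pointer bits, the constructor ORs a tag into them, and the accessor masks the tag off and branches on it.

    use std::ptr::NonNull;

    // `align(4)` guarantees the two low bits of any reference are zero, so they
    // can carry a tag (the real code relies on the alignment of interned values).
    #[repr(align(4))]
    struct TyData(u32);
    #[repr(align(4))]
    struct ConstData(u32);

    const KIND_MASK: usize = 0b11;
    const PTR_MASK: usize = !KIND_MASK;
    const TY_TAG: usize = 0b00;
    const CONST_TAG: usize = 0b01;

    #[derive(Clone, Copy)]
    struct Tagged(NonNull<()>);

    enum Kind {
        Ty(&'static TyData),
        Const(&'static ConstData),
    }

    impl Tagged {
        fn new_ty(ty: &'static TyData) -> Self {
            // Alignment keeps the low bits zero, so OR-ing the tag in is lossless.
            Tagged(NonNull::from(ty).cast::<()>().map_addr(|a| a | TY_TAG))
        }
        fn new_const(ct: &'static ConstData) -> Self {
            Tagged(NonNull::from(ct).cast::<()>().map_addr(|a| a | CONST_TAG))
        }
        fn kind(self) -> Kind {
            // Strip the tag to recover the original pointer, then branch on the tag.
            let ptr = self.0.as_ptr().map_addr(|addr| addr & PTR_MASK);
            // SAFETY: `ptr` came from a `&'static` of the kind recorded in the tag.
            unsafe {
                match self.0.addr().get() & KIND_MASK {
                    TY_TAG => Kind::Ty(&*ptr.cast::<TyData>()),
                    CONST_TAG => Kind::Const(&*ptr.cast::<ConstData>()),
                    _ => unreachable!(),
                }
            }
        }
    }

    fn main() {
        static TY: TyData = TyData(1);
        static CT: ConstData = ConstData(2);
        assert!(matches!(Tagged::new_ty(&TY).kind(), Kind::Ty(t) if t.0 == 1));
        assert!(matches!(Tagged::new_const(&CT).kind(), Kind::Const(c) if c.0 == 2));
    }

One `Tagged` is a single machine word, yet it can still be taken apart into an enum-like view on demand, which is the memory saving the module doc of `generic_arg.rs` is after.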