Rollup merge of #149516 - mati865:no-msys2, r=jieyouxu

Stop adding MSYS2 to PATH

Rust no longer requires MSYS2-provided tools like `make`.
diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs
index a821d9e..5151c35 100644
--- a/compiler/rustc_arena/src/lib.rs
+++ b/compiler/rustc_arena/src/lib.rs
@@ -10,13 +10,13 @@
 // tidy-alphabetical-start
 #![allow(clippy::mut_from_ref)] // Arena allocators are one place where this pattern is fine.
 #![allow(internal_features)]
+#![cfg_attr(bootstrap, feature(maybe_uninit_slice))]
 #![cfg_attr(test, feature(test))]
 #![deny(unsafe_op_in_unsafe_fn)]
 #![doc(test(no_crate_inject, attr(deny(warnings), allow(internal_features))))]
 #![feature(core_intrinsics)]
 #![feature(decl_macro)]
 #![feature(dropck_eyepatch)]
-#![feature(maybe_uninit_slice)]
 #![feature(never_type)]
 #![feature(rustc_attrs)]
 #![feature(unwrap_infallible)]
diff --git a/compiler/rustc_error_codes/src/error_codes/E0591.md b/compiler/rustc_error_codes/src/error_codes/E0591.md
index 6ed8370..c32aa95 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0591.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0591.md
@@ -62,14 +62,14 @@
   and do the cast in the fn body (the preferred option)
 - cast the fn item of a fn pointer before calling transmute, as shown here:
 
-    ```
-    # extern "C" fn foo(_: Box<i32>) {}
-    # use std::mem::transmute;
-    # unsafe {
-    let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_));
-    let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
-    # }
-    ```
+```
+# extern "C" fn foo(_: Box<i32>) {}
+# use std::mem::transmute;
+# unsafe {
+let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_));
+let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
+# }
+```
 
 The same applies to transmutes to `*mut fn()`, which were observed in practice.
 Note though that use of this type is generally incorrect.
diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs
index d9132ca1..d08d5a5 100644
--- a/compiler/rustc_errors/src/emitter.rs
+++ b/compiler/rustc_errors/src/emitter.rs
@@ -3544,6 +3544,8 @@ pub fn detect_confusion_type(sm: &SourceMap, suggested: &str, sp: Span) -> Confu
         let mut has_digit_letter_confusable = false;
         let mut has_other_diff = false;
 
+        // Letters whose lowercase version is very similar to the uppercase
+        // version.
         let ascii_confusables = &['c', 'f', 'i', 'k', 'o', 's', 'u', 'v', 'w', 'x', 'y', 'z'];
 
         let digit_letter_confusables = [('0', 'O'), ('1', 'l'), ('5', 'S'), ('8', 'B'), ('9', 'g')];
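As context for the two tables above, here is a small self-contained sketch of the idea; it is not the compiler's `detect_confusion_type`, the function name and example identifiers are invented, and it only compares same-length ASCII identifiers character by character:

```rust
/// Letters whose lowercase glyph closely resembles the uppercase one,
/// mirroring the `ascii_confusables` table above.
const CASE_CONFUSABLES: &[char] = &['c', 'f', 'i', 'k', 'o', 's', 'u', 'v', 'w', 'x', 'y', 'z'];

/// Returns true if `typed` differs from `suggested` only in the case of confusable
/// letters, i.e. the two names are easy to mix up visually.
fn differs_only_by_confusable_case(typed: &str, suggested: &str) -> bool {
    typed.len() == suggested.len()
        && typed.chars().zip(suggested.chars()).all(|(a, b)| {
            a == b
                || (a.eq_ignore_ascii_case(&b)
                    && CASE_CONFUSABLES.contains(&a.to_ascii_lowercase()))
        })
}

fn main() {
    // `o` and `s`/`S` look alike across cases, so this pair is flagged as confusable.
    assert!(differs_only_by_confusable_case("osstring", "OsString"));
    // `h` vs `H` is visually distinct, so this is an ordinary case mismatch.
    assert!(!differs_only_by_confusable_case("hashmap", "HashMap"));
}
```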
diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs
index b8dda9e..313cd41 100644
--- a/compiler/rustc_errors/src/lib.rs
+++ b/compiler/rustc_errors/src/lib.rs
@@ -1366,7 +1366,7 @@ pub fn emit_err(self, err: impl Diagnostic<'a>) -> ErrorGuaranteed {
         self.create_err(err).emit()
     }
 
-    /// Ensures that an error is printed. See `Level::DelayedBug`.
+    /// Ensures that an error is printed. See [`Level::DelayedBug`].
     //
     // No `#[rustc_lint_diagnostics]` and no `impl Into<DiagMessage>` because bug messages aren't
     // user-facing.
diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs
index f8af688..81bdfc1 100644
--- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs
+++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs
@@ -86,6 +86,11 @@ fn is_valid_cmse_inputs<'tcx>(
     let fn_sig = tcx.erase_and_anonymize_regions(fn_sig);
 
     for (ty, hir_ty) in fn_sig.inputs().iter().zip(fn_decl.inputs) {
+        if ty.has_infer_types() {
+            let err = LayoutError::Unknown(*ty);
+            return Err((hir_ty.span, tcx.arena.alloc(err)));
+        }
+
         let layout = tcx
             .layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(*ty))
             .map_err(|e| (hir_ty.span, e))?;
@@ -138,6 +143,11 @@ fn is_valid_cmse_output<'tcx>(
         return Ok(());
     }
 
+    if return_type.has_infer_types() {
+        let err = LayoutError::Unknown(return_type);
+        return Err(tcx.arena.alloc(err));
+    }
+
     let typing_env = ty::TypingEnv::fully_monomorphized();
     let layout = tcx.layout_of(typing_env.as_query_input(return_type))?;
 
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
index 1d16c3a..1adcd91 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
@@ -2106,14 +2106,16 @@ pub(crate) fn suggest_missing_unwrap_expect(
             )),
         );
 
-        let (article, kind, variant, sugg_operator) =
-            if self.tcx.is_diagnostic_item(sym::Result, adt.did()) {
-                ("a", "Result", "Err", ret_ty_matches(sym::Result))
-            } else if self.tcx.is_diagnostic_item(sym::Option, adt.did()) {
-                ("an", "Option", "None", ret_ty_matches(sym::Option))
-            } else {
-                return false;
-            };
+        let (article, kind, variant, sugg_operator) = if self.tcx.is_diagnostic_item(sym::Result, adt.did())
+            // Do not suggest `.expect()` in const contexts, where it is not available. See rust-lang/rust#149316.
+            && !self.tcx.hir_is_inside_const_context(expr.hir_id)
+        {
+            ("a", "Result", "Err", ret_ty_matches(sym::Result))
+        } else if self.tcx.is_diagnostic_item(sym::Option, adt.did()) {
+            ("an", "Option", "None", ret_ty_matches(sym::Option))
+        } else {
+            return false;
+        };
         if is_ctor || !self.may_coerce(args.type_at(0), expected) {
             return false;
         }
diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs
index 9a657ab..4b9ad34 100644
--- a/compiler/rustc_hir_typeck/src/method/suggest.rs
+++ b/compiler/rustc_hir_typeck/src/method/suggest.rs
@@ -3041,14 +3041,16 @@ fn suggest_unwrapping_inner_self(
                             tcx.def_span(pick.item.def_id),
                             format!("the method `{item_name}` exists on the type `{self_ty}`"),
                         );
-                        let (article, kind, variant, question) =
-                            if tcx.is_diagnostic_item(sym::Result, kind.did()) {
-                                ("a", "Result", "Err", ret_ty_matches(sym::Result))
-                            } else if tcx.is_diagnostic_item(sym::Option, kind.did()) {
-                                ("an", "Option", "None", ret_ty_matches(sym::Option))
-                            } else {
-                                return;
-                            };
+                        let (article, kind, variant, question) = if tcx.is_diagnostic_item(sym::Result, kind.did())
+                            // Do not suggest `.expect()` in const contexts, where it is not available. See rust-lang/rust#149316.
+                            && !tcx.hir_is_inside_const_context(expr.hir_id)
+                        {
+                            ("a", "Result", "Err", ret_ty_matches(sym::Result))
+                        } else if tcx.is_diagnostic_item(sym::Option, kind.did()) {
+                            ("an", "Option", "None", ret_ty_matches(sym::Option))
+                        } else {
+                            return;
+                        };
                         if question {
                             err.span_suggestion_verbose(
                                 expr.span.shrink_to_hi(),
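The guard added in the two hunks above exists because `Result::expect` is not a `const fn` on current stable Rust, so suggesting it inside a const context would produce code that does not compile. Below is a minimal standalone sketch of the situation; the function and constant names are invented for illustration and are not taken from rust-lang/rust#149316:

```rust
// Illustrative only: `parse_digit` and `DIGIT` are made-up names for this sketch.
const fn parse_digit(byte: u8) -> Result<u8, ()> {
    if byte.is_ascii_digit() { Ok(byte - b'0') } else { Err(()) }
}

// In a const context we have to match on the `Result`; writing
// `parse_digit(b'7').expect("not a digit")` here would be rejected,
// because `Result::expect` is not a `const fn`.
const DIGIT: u8 = match parse_digit(b'7') {
    Ok(d) => d,
    Err(()) => panic!("not a digit"),
};

fn main() {
    assert_eq!(DIGIT, 7);
}
```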
diff --git a/compiler/rustc_infer/src/traits/mod.rs b/compiler/rustc_infer/src/traits/mod.rs
index 79a4859..0536a6c 100644
--- a/compiler/rustc_infer/src/traits/mod.rs
+++ b/compiler/rustc_infer/src/traits/mod.rs
@@ -37,6 +37,8 @@
 #[derive(Clone, TypeFoldable, TypeVisitable)]
 pub struct Obligation<'tcx, T> {
     /// The reason we have to prove this thing.
+    /// FIXME: we shouldn't ignore the cause but instead change the affected visitors
+    /// to only visit predicates manually.
     #[type_foldable(identity)]
     #[type_visitable(ignore)]
     pub cause: ObligationCause<'tcx>,
diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs
index bf5ec8f..b1ff7ff 100644
--- a/compiler/rustc_mir_dataflow/src/value_analysis.rs
+++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs
@@ -24,9 +24,7 @@ pub struct PlaceIndex {}
 
 rustc_index::newtype_index!(
     /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
-    ///
-    /// It is an implementation detail of this module.
-    struct ValueIndex {}
+    pub struct ValueIndex {}
 );
 
 /// See [`State`].
@@ -211,22 +209,9 @@ pub fn insert_value_idx(&mut self, target: PlaceIndex, value: V, map: &Map<'_>)
     /// The target place must have been flooded before calling this method.
     pub fn insert_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map<'_>) {
         let State::Reachable(values) = self else { return };
-
-        // If both places are tracked, we copy the value to the target.
-        // If the target is tracked, but the source is not, we do nothing, as invalidation has
-        // already been performed.
-        if let Some(target_value) = map.places[target].value_index
-            && let Some(source_value) = map.places[source].value_index
-        {
-            values.insert(target_value, values.get(source_value).clone());
-        }
-        for target_child in map.children(target) {
-            // Try to find corresponding child and recurse. Reasoning is similar as above.
-            let projection = map.places[target_child].proj_elem.unwrap();
-            if let Some(source_child) = map.projections.get(&(source, projection)) {
-                self.insert_place_idx(target_child, *source_child, map);
-            }
-        }
+        map.for_each_value_pair(target, source, &mut |target, source| {
+            values.insert(target, values.get(source).clone());
+        });
     }
 
     /// Helper method to interpret `target = result`.
@@ -677,6 +662,26 @@ pub fn find_len(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
         self.find_extra(place, [TrackElem::DerefLen])
     }
 
+    /// Locates the value corresponding to the given place.
+    pub fn value(&self, place: PlaceIndex) -> Option<ValueIndex> {
+        self.places[place].value_index
+    }
+
+    /// Locates the value corresponding to the given place.
+    pub fn find_value(&self, place: PlaceRef<'_>) -> Option<ValueIndex> {
+        self.value(self.find(place)?)
+    }
+
+    /// Locates the value corresponding to the given discriminant.
+    pub fn find_discr_value(&self, place: PlaceRef<'_>) -> Option<ValueIndex> {
+        self.value(self.find_discr(place)?)
+    }
+
+    /// Locates the value corresponding to the given length.
+    pub fn find_len_value(&self, place: PlaceRef<'_>) -> Option<ValueIndex> {
+        self.value(self.find_len(place)?)
+    }
+
     /// Iterate over all direct children.
     fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> {
         Children::new(self, parent)
@@ -689,7 +694,7 @@ fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> {
     ///
     /// `tail_elem` allows to support discriminants that are not a place in MIR, but that we track
     /// as such.
-    fn for_each_aliasing_place(
+    pub fn for_each_aliasing_place(
         &self,
         place: PlaceRef<'_>,
         tail_elem: Option<TrackElem>,
@@ -745,11 +750,15 @@ fn for_each_variant_sibling(
         }
     }
 
+    /// Return the range of value indices inside this place.
+    pub fn values_inside(&self, root: PlaceIndex) -> &[ValueIndex] {
+        let range = self.inner_values[root].clone();
+        &self.inner_values_buffer[range]
+    }
+
     /// Invoke a function on each value in the given place and all descendants.
     fn for_each_value_inside(&self, root: PlaceIndex, f: &mut impl FnMut(ValueIndex)) {
-        let range = self.inner_values[root].clone();
-        let values = &self.inner_values_buffer[range];
-        for &v in values {
+        for &v in self.values_inside(root) {
             f(v)
         }
     }
@@ -778,6 +787,31 @@ pub fn for_each_projection_value<O>(
             }
         }
     }
+
+    /// Recursively iterates on each value contained in `target`, paired with matching projection
+    /// inside `source`.
+    pub fn for_each_value_pair(
+        &self,
+        target: PlaceIndex,
+        source: PlaceIndex,
+        f: &mut impl FnMut(ValueIndex, ValueIndex),
+    ) {
+        // If both places are tracked, we copy the value to the target.
+        // If the target is tracked, but the source is not, we do nothing, as invalidation has
+        // already been performed.
+        if let Some(target_value) = self.places[target].value_index
+            && let Some(source_value) = self.places[source].value_index
+        {
+            f(target_value, source_value)
+        }
+        for target_child in self.children(target) {
+            // Try to find corresponding child and recurse. Reasoning is similar as above.
+            let projection = self.places[target_child].proj_elem.unwrap();
+            if let Some(source_child) = self.projections.get(&(source, projection)) {
+                self.for_each_value_pair(target_child, *source_child, f);
+            }
+        }
+    }
 }
 
 /// This is the information tracked for every [`PlaceIndex`] and is stored by [`Map`].
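To make the recursion in the new `for_each_value_pair` easier to follow, here is a toy, self-contained analogue of its traversal; the `Place` struct, projection names and slot numbers are invented for illustration and are not the `rustc_mir_dataflow` types:

```rust
/// Toy stand-in for a tracked place: an optional value slot plus named children,
/// loosely mirroring the place tree stored in `Map` (invented type, not the compiler's).
struct Place {
    value: Option<usize>,
    children: Vec<(&'static str, Place)>,
}

/// Visit each pair of value slots found at matching projections of `target` and `source`:
/// pair the roots if both are tracked, then recurse into children present on both sides.
fn for_each_value_pair(target: &Place, source: &Place, f: &mut impl FnMut(usize, usize)) {
    if let (Some(t), Some(s)) = (target.value, source.value) {
        f(t, s);
    }
    for (proj, target_child) in &target.children {
        if let Some((_, source_child)) = source.children.iter().find(|(p, _)| p == proj) {
            for_each_value_pair(target_child, source_child, f);
        }
    }
}

fn main() {
    // `target` tracks itself, `.0` and `.1`; `source` tracks itself and `.0`,
    // so the roots and the `.0` children are paired, while `.1` contributes nothing.
    let target = Place {
        value: Some(0),
        children: vec![
            ("0", Place { value: Some(1), children: vec![] }),
            ("1", Place { value: Some(2), children: vec![] }),
        ],
    };
    let source = Place {
        value: Some(3),
        children: vec![("0", Place { value: Some(4), children: vec![] })],
    };
    for_each_value_pair(&target, &source, &mut |t, s| println!("copy slot {s} into slot {t}"));
}
```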
diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs
index 492f5ca..c021e7d 100644
--- a/compiler/rustc_mir_transform/src/jump_threading.rs
+++ b/compiler/rustc_mir_transform/src/jump_threading.rs
@@ -7,47 +7,62 @@
 //! ------------/      \--------              ------------
 //!
 //!
-//! We proceed by walking the cfg backwards starting from each `SwitchInt` terminator,
-//! looking for assignments that will turn the `SwitchInt` into a simple `Goto`.
+//! This implementation is heavily inspired by the work outlined in [libfirm].
 //!
-//! The algorithm maintains a set of replacement conditions:
-//! - `conditions[place]` contains `Condition { value, polarity: Eq, target }`
-//!   if assigning `value` to `place` turns the `SwitchInt` into `Goto { target }`.
-//! - `conditions[place]` contains `Condition { value, polarity: Ne, target }`
-//!   if assigning anything different from `value` to `place` turns the `SwitchInt`
-//!   into `Goto { target }`.
+//! The general algorithm proceeds in two phases: (1) walk the CFG backwards to construct a
+//! graph of threading conditions, and (2) propagate fulfilled conditions forward by duplicating
+//! blocks.
+//!
+//! # 1. Condition graph construction
 //!
 //! In this file, we denote as `place ?= value` the existence of a replacement condition
 //! on `place` with given `value`, irrespective of the polarity and target of that
 //! replacement condition.
 //!
-//! We then walk the CFG backwards transforming the set of conditions.
-//! When we find a fulfilling assignment, we record a `ThreadingOpportunity`.
-//! All `ThreadingOpportunity`s are applied to the body, by duplicating blocks if required.
+//! Inside a block, we associate with each condition `c` a set of targets:
+//! - `Goto(target)` if fulfilling `c` changes the terminator into a `Goto { target }`;
+//! - `Chain(target, c2)` if fulfilling `c` means that `c2` is fulfilled inside `target`.
 //!
-//! The optimization search can be very heavy, as it performs a DFS on MIR starting from
-//! each `SwitchInt` terminator. To manage the complexity, we:
-//! - bound the maximum depth by a constant `MAX_BACKTRACK`;
-//! - we only traverse `Goto` terminators.
+//! Before walking a block `bb`, we construct the exit set of conditions from its successors.
+//! For each condition `c` in a successor `s`, we record that fulfilling `c` in `bb` will fulfill
+//! `c` in `s`, as a `Chain(s, c)` condition.
+//!
+//! When encountering a `switchInt(place) -> [value: bb...]` terminator, we also record a
+//! `place == value` condition for each `value`, and give it a `Goto(target)` target.
+//!
+//! Then, we walk the statements backwards, transforming the set of conditions along the way,
+//! resulting in a set of conditions at the block entry.
 //!
 //! We try to avoid creating irreducible control-flow by not threading through a loop header.
 //!
-//! Likewise, applying the optimisation can create a lot of new MIR, so we bound the instruction
+//! Applying the optimisation can create a lot of new MIR, so we bound the instruction
 //! cost by `MAX_COST`.
+//!
+//! # 2. Block duplication
+//!
+//! We now have the set of fulfilled conditions inside each block and their targets.
+//!
+//! For each block `bb` in reverse postorder, we apply in turn the target associated with each
+//! fulfilled condition:
+//! - for `Goto(target)`, change the terminator of `bb` into a `Goto { target }`;
+//! - for `Chain(target, cond)`, duplicate `target` into a new block which fulfills the same
+//! conditions and also fulfills `cond`. This is made efficient by maintaining a map of duplicates,
+//! `duplicate[(target, cond)]` to avoid cloning blocks multiple times.
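+//!
+//! As a small illustration (a sketch, not literal MIR produced by the pass): given
+//! `bb0: switchInt(_1) -> [0: bb1, otherwise: bb2]`, `bb1: _2 = 1; goto -> bb3`,
+//! `bb2: _2 = 2; goto -> bb3` and `bb3: switchInt(_2) -> [1: bb4, otherwise: bb5]`,
+//! the assignment in `bb1` fulfills the condition `_2 == 1` recorded by `bb3`'s switch,
+//! so `bb1` can be redirected to a duplicate of `bb3` that jumps straight to `bb4`.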
+//!
+//! [libfirm]: <https://pp.ipd.kit.edu/uploads/publikationen/priesner17masterarbeit.pdf>
 
-use rustc_arena::DroplessArena;
+use itertools::Itertools as _;
 use rustc_const_eval::const_eval::DummyMachine;
 use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable};
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
 use rustc_index::IndexVec;
-use rustc_index::bit_set::DenseBitSet;
+use rustc_index::bit_set::{DenseBitSet, GrowableBitSet};
 use rustc_middle::bug;
 use rustc_middle::mir::interpret::Scalar;
 use rustc_middle::mir::visit::Visitor;
 use rustc_middle::mir::*;
 use rustc_middle::ty::{self, ScalarInt, TyCtxt};
-use rustc_mir_dataflow::lattice::HasBottom;
-use rustc_mir_dataflow::value_analysis::{Map, PlaceIndex, State, TrackElem};
+use rustc_mir_dataflow::value_analysis::{Map, PlaceIndex, TrackElem, ValueIndex};
 use rustc_span::DUMMY_SP;
 use tracing::{debug, instrument, trace};
 
@@ -55,8 +70,7 @@
 
 pub(super) struct JumpThreading;
 
-const MAX_BACKTRACK: usize = 5;
-const MAX_COST: usize = 100;
+const MAX_COST: u8 = 100;
 const MAX_PLACES: usize = 100;
 
 impl<'tcx> crate::MirPass<'tcx> for JumpThreading {
@@ -76,33 +90,54 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         }
 
         let typing_env = body.typing_env(tcx);
-        let arena = &DroplessArena::default();
         let mut finder = TOFinder {
             tcx,
             typing_env,
             ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
             body,
-            arena,
             map: Map::new(tcx, body, Some(MAX_PLACES)),
             maybe_loop_headers: loops::maybe_loop_headers(body),
-            opportunities: Vec::new(),
+            entry_states: IndexVec::from_elem(ConditionSet::default(), &body.basic_blocks),
         };
 
-        for (bb, _) in traversal::preorder(body) {
-            finder.start_from_switch(bb);
+        for (bb, bbdata) in traversal::postorder(body) {
+            if bbdata.is_cleanup {
+                continue;
+            }
+
+            let mut state = finder.populate_from_outgoing_edges(bb);
+            trace!("output_states[{bb:?}] = {state:?}");
+
+            finder.process_terminator(bb, &mut state);
+            trace!("pre_terminator_states[{bb:?}] = {state:?}");
+
+            for stmt in bbdata.statements.iter().rev() {
+                if state.is_empty() {
+                    break;
+                }
+
+                finder.process_statement(stmt, &mut state);
+
+                // When a statement mutates a place, assignments to that place that happen
+                // above the mutation cannot fulfill a condition.
+                //   _1 = 5 // Whatever happens here, it won't change the result of a `SwitchInt`.
+                //   _1 = 6
+                if let Some((lhs, tail)) = finder.mutated_statement(stmt) {
+                    finder.flood_state(lhs, tail, &mut state);
+                }
+            }
+
+            trace!("entry_states[{bb:?}] = {state:?}");
+            finder.entry_states[bb] = state;
         }
 
-        let opportunities = finder.opportunities;
-        debug!(?opportunities);
-        if opportunities.is_empty() {
-            return;
-        }
+        let mut entry_states = finder.entry_states;
+        simplify_conditions(body, &mut entry_states);
+        remove_costly_conditions(tcx, typing_env, body, &mut entry_states);
 
-        // Verify that we do not thread through a loop header.
-        for to in opportunities.iter() {
-            assert!(to.chain.iter().all(|&block| !finder.maybe_loop_headers.contains(block)));
+        if let Some(opportunities) = OpportunitySet::new(body, entry_states) {
+            opportunities.apply();
         }
-        OpportunitySet::new(body, opportunities).apply(body);
     }
 
     fn is_required(&self) -> bool {
@@ -110,14 +145,6 @@ fn is_required(&self) -> bool {
     }
 }
 
-#[derive(Debug)]
-struct ThreadingOpportunity {
-    /// The list of `BasicBlock`s from the one that found the opportunity to the `SwitchInt`.
-    chain: Vec<BasicBlock>,
-    /// The `SwitchInt` will be replaced by `Goto { target }`.
-    target: BasicBlock,
-}
-
 struct TOFinder<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     typing_env: ty::TypingEnv<'tcx>,
@@ -125,192 +152,207 @@ struct TOFinder<'a, 'tcx> {
     body: &'a Body<'tcx>,
     map: Map<'tcx>,
     maybe_loop_headers: DenseBitSet<BasicBlock>,
-    /// We use an arena to avoid cloning the slices when cloning `state`.
-    arena: &'a DroplessArena,
-    opportunities: Vec<ThreadingOpportunity>,
+    /// This stores the state of each visited block on entry,
+    /// and the current state of the block being visited.
+    // Invariant: for each `bb`, each condition in `entry_states[bb]` has a `chain` that
+    // starts with `bb`.
+    entry_states: IndexVec<BasicBlock, ConditionSet>,
+}
+
+rustc_index::newtype_index! {
+    #[derive(Ord, PartialOrd)]
+    #[debug_format = "_c{}"]
+    struct ConditionIndex {}
 }
 
 /// Represent the following statement. If we can prove that the current local is equal/not-equal
 /// to `value`, jump to `target`.
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
 struct Condition {
+    place: ValueIndex,
     value: ScalarInt,
     polarity: Polarity,
-    target: BasicBlock,
 }
 
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
 enum Polarity {
     Ne,
     Eq,
 }
 
 impl Condition {
-    fn matches(&self, value: ScalarInt) -> bool {
-        (self.value == value) == (self.polarity == Polarity::Eq)
+    fn matches(&self, place: ValueIndex, value: ScalarInt) -> bool {
+        self.place == place && (self.value == value) == (self.polarity == Polarity::Eq)
     }
 }
 
-#[derive(Copy, Clone, Debug)]
-struct ConditionSet<'a>(&'a [Condition]);
+/// Represent the effect of fulfilling a condition.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+enum EdgeEffect {
+    /// If the condition is fulfilled, replace the current block's terminator by a single goto.
+    Goto { target: BasicBlock },
+    /// If the condition is fulfilled, fulfill the condition `succ_condition` in `succ_block`.
+    Chain { succ_block: BasicBlock, succ_condition: ConditionIndex },
+}
 
-impl HasBottom for ConditionSet<'_> {
-    const BOTTOM: Self = ConditionSet(&[]);
+impl EdgeEffect {
+    fn block(self) -> BasicBlock {
+        match self {
+            EdgeEffect::Goto { target: bb } | EdgeEffect::Chain { succ_block: bb, .. } => bb,
+        }
+    }
 
-    fn is_bottom(&self) -> bool {
-        self.0.is_empty()
+    fn replace_block(&mut self, target: BasicBlock, new_target: BasicBlock) {
+        match self {
+            EdgeEffect::Goto { target: bb } | EdgeEffect::Chain { succ_block: bb, .. } => {
+                if *bb == target {
+                    *bb = new_target
+                }
+            }
+        }
     }
 }
 
-impl<'a> ConditionSet<'a> {
-    fn iter(self) -> impl Iterator<Item = Condition> {
-        self.0.iter().copied()
+#[derive(Clone, Debug, Default)]
+struct ConditionSet {
+    active: Vec<(ConditionIndex, Condition)>,
+    fulfilled: Vec<ConditionIndex>,
+    targets: IndexVec<ConditionIndex, Vec<EdgeEffect>>,
+}
+
+impl ConditionSet {
+    fn is_empty(&self) -> bool {
+        self.active.is_empty()
     }
 
-    fn iter_matches(self, value: ScalarInt) -> impl Iterator<Item = Condition> {
-        self.iter().filter(move |c| c.matches(value))
+    #[tracing::instrument(level = "trace", skip(self))]
+    fn push_condition(&mut self, c: Condition, target: BasicBlock) {
+        let index = self.targets.push(vec![EdgeEffect::Goto { target }]);
+        self.active.push((index, c));
     }
 
-    fn map(
-        self,
-        arena: &'a DroplessArena,
-        f: impl Fn(Condition) -> Option<Condition>,
-    ) -> Option<ConditionSet<'a>> {
-        let set = arena.try_alloc_from_iter(self.iter().map(|c| f(c).ok_or(()))).ok()?;
-        Some(ConditionSet(set))
+    /// Register as fulfilled the conditions for which `f` returns true, and remove them from the set.
+    fn fulfill_if(&mut self, f: impl Fn(Condition, &Vec<EdgeEffect>) -> bool) {
+        self.active.retain(|&(index, condition)| {
+            let targets = &self.targets[index];
+            if f(condition, targets) {
+                trace!(?index, ?condition, "fulfill");
+                self.fulfilled.push(index);
+                false
+            } else {
+                true
+            }
+        })
+    }
+
+    /// Register fulfilled conditions and remove them from the set.
+    fn fulfill_matches(&mut self, place: ValueIndex, value: ScalarInt) {
+        self.fulfill_if(|c, _| c.matches(place, value))
+    }
+
+    fn retain(&mut self, mut f: impl FnMut(Condition) -> bool) {
+        self.active.retain(|&(_, c)| f(c))
+    }
+
+    fn retain_mut(&mut self, mut f: impl FnMut(Condition) -> Option<Condition>) {
+        self.active.retain_mut(|(_, c)| {
+            if let Some(new) = f(*c) {
+                *c = new;
+                true
+            } else {
+                false
+            }
+        })
+    }
+
+    fn for_each_mut(&mut self, f: impl Fn(&mut Condition)) {
+        for (_, c) in &mut self.active {
+            f(c)
+        }
     }
 }
 
 impl<'a, 'tcx> TOFinder<'a, 'tcx> {
-    fn is_empty(&self, state: &State<ConditionSet<'a>>) -> bool {
-        state.all_bottom()
-    }
-
-    /// Recursion entry point to find threading opportunities.
+    /// Construct the condition set for `bb` from the terminator, without executing its effect.
     #[instrument(level = "trace", skip(self))]
-    fn start_from_switch(&mut self, bb: BasicBlock) {
+    fn populate_from_outgoing_edges(&mut self, bb: BasicBlock) -> ConditionSet {
         let bbdata = &self.body[bb];
-        if bbdata.is_cleanup || self.maybe_loop_headers.contains(bb) {
-            return;
-        }
-        let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { return };
-        let Some(discr) = discr.place() else { return };
-        debug!(?discr, ?bb);
 
-        let discr_ty = discr.ty(self.body, self.tcx).ty;
-        let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };
+        // This should be the first time we populate `entry_states[bb]`.
+        debug_assert!(self.entry_states[bb].is_empty());
 
-        let Some(discr) = self.map.find(discr.as_ref()) else { return };
-        debug!(?discr);
-
-        let cost = CostChecker::new(self.tcx, self.typing_env, None, self.body);
-        let mut state = State::new_reachable();
-
-        let conds = if let Some((value, then, else_)) = targets.as_static_if() {
-            let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
-            self.arena.alloc_from_iter([
-                Condition { value, polarity: Polarity::Eq, target: then },
-                Condition { value, polarity: Polarity::Ne, target: else_ },
-            ])
-        } else {
-            self.arena.alloc_from_iter(targets.iter().filter_map(|(value, target)| {
-                let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
-                Some(Condition { value, polarity: Polarity::Eq, target })
-            }))
+        let state_len =
+            bbdata.terminator().successors().map(|succ| self.entry_states[succ].active.len()).sum();
+        let mut state = ConditionSet {
+            active: Vec::with_capacity(state_len),
+            targets: IndexVec::with_capacity(state_len),
+            fulfilled: Vec::new(),
         };
-        let conds = ConditionSet(conds);
-        state.insert_value_idx(discr, conds, &self.map);
 
-        self.find_opportunity(bb, state, cost, 0)
+        // Use an index-set to deduplicate conditions coming from different successor blocks.
+        let mut known_conditions =
+            FxIndexSet::with_capacity_and_hasher(state_len, Default::default());
+        let mut insert = |condition, succ_block, succ_condition| {
+            let (index, new) = known_conditions.insert_full(condition);
+            let index = ConditionIndex::from_usize(index);
+            if new {
+                state.active.push((index, condition));
+                let _index = state.targets.push(Vec::new());
+                debug_assert_eq!(_index, index);
+            }
+            let target = EdgeEffect::Chain { succ_block, succ_condition };
+            debug_assert!(
+                !state.targets[index].contains(&target),
+                "duplicate targets for index={index:?} as {target:?} targets={:#?}",
+                &state.targets[index],
+            );
+            state.targets[index].push(target);
+        };
+
+        // A given block may have the same successor several times.
+        let mut seen = FxHashSet::default();
+        for succ in bbdata.terminator().successors() {
+            if !seen.insert(succ) {
+                continue;
+            }
+
+            // Do not thread through loop headers.
+            if self.maybe_loop_headers.contains(succ) {
+                continue;
+            }
+
+            for &(succ_index, cond) in self.entry_states[succ].active.iter() {
+                insert(cond, succ, succ_index);
+            }
+        }
+
+        let num_conditions = known_conditions.len();
+        debug_assert_eq!(num_conditions, state.active.len());
+        debug_assert_eq!(num_conditions, state.targets.len());
+        state.fulfilled.reserve(num_conditions);
+
+        state
     }
 
-    /// Recursively walk statements backwards from this bb's terminator to find threading
-    /// opportunities.
-    #[instrument(level = "trace", skip(self, cost), ret)]
-    fn find_opportunity(
-        &mut self,
-        bb: BasicBlock,
-        mut state: State<ConditionSet<'a>>,
-        mut cost: CostChecker<'_, 'tcx>,
-        depth: usize,
+    /// Remove all conditions in the state that alias the given place.
+    fn flood_state(
+        &self,
+        place: Place<'tcx>,
+        extra_elem: Option<TrackElem>,
+        state: &mut ConditionSet,
     ) {
-        // Do not thread through loop headers.
-        if self.maybe_loop_headers.contains(bb) {
+        if state.is_empty() {
             return;
         }
-
-        debug!(cost = ?cost.cost());
-        for (statement_index, stmt) in
-            self.body.basic_blocks[bb].statements.iter().enumerate().rev()
-        {
-            if self.is_empty(&state) {
-                return;
-            }
-
-            cost.visit_statement(stmt, Location { block: bb, statement_index });
-            if cost.cost() > MAX_COST {
-                return;
-            }
-
-            // Attempt to turn the `current_condition` on `lhs` into a condition on another place.
-            self.process_statement(bb, stmt, &mut state);
-
-            // When a statement mutates a place, assignments to that place that happen
-            // above the mutation cannot fulfill a condition.
-            //   _1 = 5 // Whatever happens here, it won't change the result of a `SwitchInt`.
-            //   _1 = 6
-            if let Some((lhs, tail)) = self.mutated_statement(stmt) {
-                state.flood_with_tail_elem(lhs.as_ref(), tail, &self.map, ConditionSet::BOTTOM);
-            }
-        }
-
-        if self.is_empty(&state) || depth >= MAX_BACKTRACK {
+        let mut places_to_exclude = FxHashSet::default();
+        self.map.for_each_aliasing_place(place.as_ref(), extra_elem, &mut |vi| {
+            places_to_exclude.insert(vi);
+        });
+        trace!(?places_to_exclude, "flood_state");
+        if places_to_exclude.is_empty() {
             return;
         }
-
-        let last_non_rec = self.opportunities.len();
-
-        let predecessors = &self.body.basic_blocks.predecessors()[bb];
-        if let &[pred] = &predecessors[..]
-            && bb != START_BLOCK
-        {
-            let term = self.body.basic_blocks[pred].terminator();
-            match term.kind {
-                TerminatorKind::SwitchInt { ref discr, ref targets } => {
-                    self.process_switch_int(discr, targets, bb, &mut state);
-                    self.find_opportunity(pred, state, cost, depth + 1);
-                }
-                _ => self.recurse_through_terminator(pred, || state, &cost, depth),
-            }
-        } else if let &[ref predecessors @ .., last_pred] = &predecessors[..] {
-            for &pred in predecessors {
-                self.recurse_through_terminator(pred, || state.clone(), &cost, depth);
-            }
-            self.recurse_through_terminator(last_pred, || state, &cost, depth);
-        }
-
-        let new_tos = &mut self.opportunities[last_non_rec..];
-        debug!(?new_tos);
-
-        // Try to deduplicate threading opportunities.
-        if new_tos.len() > 1
-            && new_tos.len() == predecessors.len()
-            && predecessors
-                .iter()
-                .zip(new_tos.iter())
-                .all(|(&pred, to)| to.chain == &[pred] && to.target == new_tos[0].target)
-        {
-            // All predecessors have a threading opportunity, and they all point to the same block.
-            debug!(?new_tos, "dedup");
-            let first = &mut new_tos[0];
-            *first = ThreadingOpportunity { chain: vec![bb], target: first.target };
-            self.opportunities.truncate(last_non_rec + 1);
-            return;
-        }
-
-        for op in self.opportunities[last_non_rec..].iter_mut() {
-            op.chain.push(bb);
-        }
+        state.retain(|c| !places_to_exclude.contains(&c.place));
     }
 
     /// Extract the mutated place from a statement.
@@ -353,35 +395,27 @@ fn mutated_statement(
         }
     }
 
-    #[instrument(level = "trace", skip(self))]
-    fn process_immediate(
-        &mut self,
-        bb: BasicBlock,
-        lhs: PlaceIndex,
-        rhs: ImmTy<'tcx>,
-        state: &mut State<ConditionSet<'a>>,
-    ) {
-        let register_opportunity = |c: Condition| {
-            debug!(?bb, ?c.target, "register");
-            self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
-        };
-
-        if let Some(conditions) = state.try_get_idx(lhs, &self.map)
+    #[instrument(level = "trace", skip(self, state))]
+    fn process_immediate(&mut self, lhs: PlaceIndex, rhs: ImmTy<'tcx>, state: &mut ConditionSet) {
+        if let Some(lhs) = self.map.value(lhs)
             && let Immediate::Scalar(Scalar::Int(int)) = *rhs
         {
-            conditions.iter_matches(int).for_each(register_opportunity);
+            state.fulfill_matches(lhs, int)
         }
     }
 
     /// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
-    #[instrument(level = "trace", skip(self))]
+    #[instrument(level = "trace", skip(self, state))]
     fn process_constant(
         &mut self,
-        bb: BasicBlock,
         lhs: PlaceIndex,
         constant: OpTy<'tcx>,
-        state: &mut State<ConditionSet<'a>>,
+        state: &mut ConditionSet,
     ) {
+        let values_inside = self.map.values_inside(lhs);
+        if !state.active.iter().any(|&(_, cond)| values_inside.contains(&cond.place)) {
+            return;
+        }
         self.map.for_each_projection_value(
             lhs,
             constant,
@@ -402,28 +436,32 @@ fn process_constant(
                 }
             },
             &mut |place, op| {
-                if let Some(conditions) = state.try_get_idx(place, &self.map)
+                if let Some(place) = self.map.value(place)
                     && let Some(imm) = self.ecx.read_immediate_raw(op).discard_err()
                     && let Some(imm) = imm.right()
                     && let Immediate::Scalar(Scalar::Int(int)) = *imm
                 {
-                    conditions.iter_matches(int).for_each(|c: Condition| {
-                        self.opportunities
-                            .push(ThreadingOpportunity { chain: vec![bb], target: c.target })
-                    })
+                    state.fulfill_matches(place, int)
                 }
             },
         );
     }
 
-    #[instrument(level = "trace", skip(self))]
-    fn process_operand(
-        &mut self,
-        bb: BasicBlock,
-        lhs: PlaceIndex,
-        rhs: &Operand<'tcx>,
-        state: &mut State<ConditionSet<'a>>,
-    ) {
+    #[instrument(level = "trace", skip(self, state))]
+    fn process_copy(&mut self, lhs: PlaceIndex, rhs: PlaceIndex, state: &mut ConditionSet) {
+        let mut renames = FxHashMap::default();
+        self.map.for_each_value_pair(rhs, lhs, &mut |rhs, lhs| {
+            renames.insert(lhs, rhs);
+        });
+        state.for_each_mut(|c| {
+            if let Some(rhs) = renames.get(&c.place) {
+                c.place = *rhs
+            }
+        });
+    }
+
+    #[instrument(level = "trace", skip(self, state))]
+    fn process_operand(&mut self, lhs: PlaceIndex, rhs: &Operand<'tcx>, state: &mut ConditionSet) {
         match rhs {
             // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
             Operand::Constant(constant) => {
@@ -432,31 +470,30 @@ fn process_operand(
                 else {
                     return;
                 };
-                self.process_constant(bb, lhs, constant, state);
+                self.process_constant(lhs, constant, state);
             }
             // Transfer the conditions on the copied rhs.
             Operand::Move(rhs) | Operand::Copy(rhs) => {
                 let Some(rhs) = self.map.find(rhs.as_ref()) else { return };
-                state.insert_place_idx(rhs, lhs, &self.map);
+                self.process_copy(lhs, rhs, state)
             }
         }
     }
 
-    #[instrument(level = "trace", skip(self))]
+    #[instrument(level = "trace", skip(self, state))]
     fn process_assign(
         &mut self,
-        bb: BasicBlock,
         lhs_place: &Place<'tcx>,
-        rhs: &Rvalue<'tcx>,
-        state: &mut State<ConditionSet<'a>>,
+        rvalue: &Rvalue<'tcx>,
+        state: &mut ConditionSet,
     ) {
         let Some(lhs) = self.map.find(lhs_place.as_ref()) else { return };
-        match rhs {
-            Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state),
+        match rvalue {
+            Rvalue::Use(operand) => self.process_operand(lhs, operand, state),
             // Transfer the conditions on the copy rhs.
             Rvalue::Discriminant(rhs) => {
                 let Some(rhs) = self.map.find_discr(rhs.as_ref()) else { return };
-                state.insert_place_idx(rhs, lhs, &self.map);
+                self.process_copy(lhs, rhs, state)
             }
             // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
             Rvalue::Aggregate(box kind, operands) => {
@@ -471,7 +508,7 @@ fn process_assign(
                                 .discriminant_for_variant(agg_ty, *variant_index)
                                 .discard_err()
                         {
-                            self.process_immediate(bb, discr_target, discr_value, state);
+                            self.process_immediate(discr_target, discr_value, state);
                         }
                         if let Some(idx) = self.map.apply(lhs, TrackElem::Variant(*variant_index)) {
                             idx
@@ -483,37 +520,36 @@ fn process_assign(
                 };
                 for (field_index, operand) in operands.iter_enumerated() {
                     if let Some(field) = self.map.apply(lhs, TrackElem::Field(field_index)) {
-                        self.process_operand(bb, field, operand, state);
+                        self.process_operand(field, operand, state);
                     }
                 }
             }
             // Transfer the conditions on the copy rhs, after inverting the value of the condition.
-            Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => {
-                let layout = self.ecx.layout_of(place.ty(self.body, self.tcx).ty).unwrap();
-                let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return };
-                let Some(place) = self.map.find(place.as_ref()) else { return };
-                let Some(conds) = conditions.map(self.arena, |mut cond| {
-                    cond.value = self
-                        .ecx
-                        .unary_op(UnOp::Not, &ImmTy::from_scalar_int(cond.value, layout))
-                        .discard_err()?
-                        .to_scalar_int()
-                        .discard_err()?;
-                    Some(cond)
-                }) else {
-                    return;
-                };
-                state.insert_value_idx(place, conds, &self.map);
+            Rvalue::UnaryOp(UnOp::Not, Operand::Move(operand) | Operand::Copy(operand)) => {
+                let layout = self.ecx.layout_of(operand.ty(self.body, self.tcx).ty).unwrap();
+                let Some(lhs) = self.map.value(lhs) else { return };
+                let Some(operand) = self.map.find_value(operand.as_ref()) else { return };
+                state.retain_mut(|mut c| {
+                    if c.place == lhs {
+                        let value = self
+                            .ecx
+                            .unary_op(UnOp::Not, &ImmTy::from_scalar_int(c.value, layout))
+                            .discard_err()?
+                            .to_scalar_int()
+                            .discard_err()?;
+                        c.place = operand;
+                        c.value = value;
+                    }
+                    Some(c)
+                });
             }
             // We expect `lhs ?= A`. We found `lhs = Eq(rhs, B)`.
             // Create a condition on `rhs ?= B`.
             Rvalue::BinaryOp(
                 op,
-                box (Operand::Move(place) | Operand::Copy(place), Operand::Constant(value))
-                | box (Operand::Constant(value), Operand::Move(place) | Operand::Copy(place)),
+                box (Operand::Move(operand) | Operand::Copy(operand), Operand::Constant(value))
+                | box (Operand::Constant(value), Operand::Move(operand) | Operand::Copy(operand)),
             ) => {
-                let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return };
-                let Some(place) = self.map.find(place.as_ref()) else { return };
                 let equals = match op {
                     BinOp::Eq => ScalarInt::TRUE,
                     BinOp::Ne => ScalarInt::FALSE,
@@ -526,38 +562,29 @@ fn process_assign(
                     // Avoid handling them, though this could be extended in the future.
                     return;
                 }
+                let Some(lhs) = self.map.value(lhs) else { return };
+                let Some(operand) = self.map.find_value(operand.as_ref()) else { return };
                 let Some(value) = value.const_.try_eval_scalar_int(self.tcx, self.typing_env)
                 else {
                     return;
                 };
-                let Some(conds) = conditions.map(self.arena, |c| {
-                    Some(Condition {
-                        value,
-                        polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne },
-                        ..c
-                    })
-                }) else {
-                    return;
-                };
-                state.insert_value_idx(place, conds, &self.map);
+                state.for_each_mut(|c| {
+                    if c.place == lhs {
+                        let polarity =
+                            if c.matches(lhs, equals) { Polarity::Eq } else { Polarity::Ne };
+                        c.place = operand;
+                        c.value = value;
+                        c.polarity = polarity;
+                    }
+                });
             }
 
             _ => {}
         }
     }
 
-    #[instrument(level = "trace", skip(self))]
-    fn process_statement(
-        &mut self,
-        bb: BasicBlock,
-        stmt: &Statement<'tcx>,
-        state: &mut State<ConditionSet<'a>>,
-    ) {
-        let register_opportunity = |c: Condition| {
-            debug!(?bb, ?c.target, "register");
-            self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
-        };
-
+    #[instrument(level = "trace", skip(self, state))]
+    fn process_statement(&mut self, stmt: &Statement<'tcx>, state: &mut ConditionSet) {
         // Below, `lhs` is the return value of `mutated_statement`,
         // the place to which `conditions` apply.
 
@@ -575,63 +602,59 @@ fn process_statement(
                 else {
                     return;
                 };
-                self.process_immediate(bb, discr_target, discr, state)
+                self.process_immediate(discr_target, discr, state)
             }
             // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`.
             StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(
                 Operand::Copy(place) | Operand::Move(place),
             )) => {
-                let Some(conditions) = state.try_get(place.as_ref(), &self.map) else { return };
-                conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity)
+                let Some(place) = self.map.find_value(place.as_ref()) else { return };
+                state.fulfill_matches(place, ScalarInt::TRUE);
             }
             StatementKind::Assign(box (lhs_place, rhs)) => {
-                self.process_assign(bb, lhs_place, rhs, state)
+                self.process_assign(lhs_place, rhs, state)
             }
             _ => {}
         }
     }
 
-    #[instrument(level = "trace", skip(self, state, cost))]
-    fn recurse_through_terminator(
-        &mut self,
-        bb: BasicBlock,
-        // Pass a closure that may clone the state, as we don't want to do it each time.
-        state: impl FnOnce() -> State<ConditionSet<'a>>,
-        cost: &CostChecker<'_, 'tcx>,
-        depth: usize,
-    ) {
+    /// Execute the terminator for block `bb` into state `entry_states[bb]`.
+    #[instrument(level = "trace", skip(self, state))]
+    fn process_terminator(&mut self, bb: BasicBlock, state: &mut ConditionSet) {
         let term = self.body.basic_blocks[bb].terminator();
         let place_to_flood = match term.kind {
-            // We come from a target, so those are not possible.
-            TerminatorKind::UnwindResume
-            | TerminatorKind::UnwindTerminate(_)
-            | TerminatorKind::Return
-            | TerminatorKind::TailCall { .. }
-            | TerminatorKind::Unreachable
-            | TerminatorKind::CoroutineDrop => bug!("{term:?} has no terminators"),
             // Disallowed during optimizations.
             TerminatorKind::FalseEdge { .. }
             | TerminatorKind::FalseUnwind { .. }
             | TerminatorKind::Yield { .. } => bug!("{term:?} invalid"),
             // Cannot reason about inline asm.
-            TerminatorKind::InlineAsm { .. } => return,
+            TerminatorKind::InlineAsm { .. } => {
+                state.active.clear();
+                return;
+            }
             // `SwitchInt` is handled specially.
-            TerminatorKind::SwitchInt { .. } => return,
-            // We can recurse, no thing particular to do.
-            TerminatorKind::Goto { .. } => None,
+            TerminatorKind::SwitchInt { ref discr, ref targets } => {
+                return self.process_switch_int(discr, targets, state);
+            }
+            // These do not modify memory.
+            TerminatorKind::UnwindResume
+            | TerminatorKind::UnwindTerminate(_)
+            | TerminatorKind::Return
+            | TerminatorKind::Unreachable
+            | TerminatorKind::CoroutineDrop
+            // Assertions can be a no-op at codegen time, so treat them as such.
+            | TerminatorKind::Assert { .. }
+            | TerminatorKind::Goto { .. } => None,
             // Flood the overwritten place, and progress through.
             TerminatorKind::Drop { place: destination, .. }
             | TerminatorKind::Call { destination, .. } => Some(destination),
-            // Ignore, as this can be a no-op at codegen time.
-            TerminatorKind::Assert { .. } => None,
+            TerminatorKind::TailCall { .. } => Some(RETURN_PLACE.into()),
         };
 
-        // We can recurse through this terminator.
-        let mut state = state();
+        // This terminator modifies `place_to_flood`, so clean up the associated conditions.
         if let Some(place_to_flood) = place_to_flood {
-            state.flood_with(place_to_flood.as_ref(), &self.map, ConditionSet::BOTTOM);
+            self.flood_state(place_to_flood, None, state);
         }
-        self.find_opportunity(bb, state, cost.clone(), depth + 1)
     }
 
     #[instrument(level = "trace", skip(self))]
@@ -639,194 +662,420 @@ fn process_switch_int(
         &mut self,
         discr: &Operand<'tcx>,
         targets: &SwitchTargets,
-        target_bb: BasicBlock,
-        state: &mut State<ConditionSet<'a>>,
+        state: &mut ConditionSet,
     ) {
-        debug_assert_ne!(target_bb, START_BLOCK);
-        debug_assert_eq!(self.body.basic_blocks.predecessors()[target_bb].len(), 1);
-
         let Some(discr) = discr.place() else { return };
+        let Some(discr_idx) = self.map.find_value(discr.as_ref()) else { return };
+
         let discr_ty = discr.ty(self.body, self.tcx).ty;
-        let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else {
-            return;
-        };
-        let Some(conditions) = state.try_get(discr.as_ref(), &self.map) else { return };
+        let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };
 
-        if let Some((value, _)) = targets.iter().find(|&(_, target)| target == target_bb) {
-            let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
-            debug_assert_eq!(targets.iter().filter(|&(_, target)| target == target_bb).count(), 1);
+        // Attempt to fulfill a condition using an outgoing branch's condition.
+        // Only support the case where there are no duplicated outgoing edges.
+        if targets.is_distinct() {
+            for &(index, c) in state.active.iter() {
+                if c.place != discr_idx {
+                    continue;
+                }
 
-            // We are inside `target_bb`. Since we have a single predecessor, we know we passed
-            // through the `SwitchInt` before arriving here. Therefore, we know that
-            // `discr == value`. If one condition can be fulfilled by `discr == value`,
-            // that's an opportunity.
-            for c in conditions.iter_matches(value) {
-                debug!(?target_bb, ?c.target, "register");
-                self.opportunities.push(ThreadingOpportunity { chain: vec![], target: c.target });
+                // Set of blocks `t` such that the edge `bb -> t` fulfills `c`.
+                let mut edges_fulfilling_condition = FxHashSet::default();
+
+                // On edge `bb -> tgt`, we know that `discr_idx == branch`.
+                for (branch, tgt) in targets.iter() {
+                    if let Some(branch) = ScalarInt::try_from_uint(branch, discr_layout.size)
+                        && c.matches(discr_idx, branch)
+                    {
+                        edges_fulfilling_condition.insert(tgt);
+                    }
+                }
+
+                // On edge `bb -> otherwise`, we only know that `discr` is different from all the
+                // constants in the switch. That's much weaker information than the equality we
+                // had in the previous arm. All we can conclude is that the replacement condition
+                // `discr != value` can be threaded, and nothing else.
+                if c.polarity == Polarity::Ne
+                    && let Ok(value) = c.value.try_to_bits(discr_layout.size)
+                    && targets.all_values().contains(&value.into())
+                {
+                    edges_fulfilling_condition.insert(targets.otherwise());
+                }
+
+                // Register that jumping to such a block `t` fulfills condition `c`.
+                // This does *not* mean that `c` is fulfilled in this block: inserting `index` in
+                // `fulfilled` is wrong if we have targets that jump to other blocks.
+                let condition_targets = &state.targets[index];
+
+                let new_edges: Vec<_> = condition_targets
+                    .iter()
+                    .copied()
+                    .filter(|&target| match target {
+                        EdgeEffect::Goto { .. } => false,
+                        EdgeEffect::Chain { succ_block, .. } => {
+                            edges_fulfilling_condition.contains(&succ_block)
+                        }
+                    })
+                    .collect();
+
+                if new_edges.len() == condition_targets.len() {
+                    // If `new_edges == condition_targets`, do not bother creating a new
+                    // `ConditionIndex`, we can use the existing one.
+                    state.fulfilled.push(index);
+                } else {
+                    // Fulfilling `index` may thread conditions that we do not want,
+                    // so create a brand new index and immediately mark it as fulfilled.
+                    let index = state.targets.push(new_edges);
+                    state.fulfilled.push(index);
+                }
             }
-        } else if let Some((value, _, else_bb)) = targets.as_static_if()
-            && target_bb == else_bb
-        {
-            let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
+        }
 
-            // We only know that `discr != value`. That's much weaker information than
-            // the equality we had in the previous arm. All we can conclude is that
-            // the replacement condition `discr != value` can be threaded, and nothing else.
-            for c in conditions.iter() {
-                if c.value == value && c.polarity == Polarity::Ne {
-                    debug!(?target_bb, ?c.target, "register");
-                    self.opportunities
-                        .push(ThreadingOpportunity { chain: vec![], target: c.target });
+        // Introduce additional conditions of the form `discr ?= value` for each value in targets.
+        let mut mk_condition = |value, polarity, target| {
+            let c = Condition { place: discr_idx, value, polarity };
+            state.push_condition(c, target);
+        };
+        if let Some((value, then_, else_)) = targets.as_static_if() {
+            // We have an `if`, generate both `discr == value` and `discr != value`.
+            let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
+            mk_condition(value, Polarity::Eq, then_);
+            mk_condition(value, Polarity::Ne, else_);
+        } else {
+            // We have a general switch and we cannot express `discr != value0 && discr != value1`,
+            // so we only generate equality predicates.
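+            // E.g. (hypothetical): `switchInt(discr) -> [0: bb1, 1: bb2, otherwise: bb3]` yields
+            // `discr == 0` towards `bb1` and `discr == 1` towards `bb2`, but no condition for the
+            // `otherwise` edge.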
+            for (value, target) in targets.iter() {
+                if let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) {
+                    mk_condition(value, Polarity::Eq, target);
                 }
             }
         }
     }
 }
 
-struct OpportunitySet {
-    opportunities: Vec<ThreadingOpportunity>,
-    /// For each bb, give the TOs in which it appears. The pair corresponds to the index
-    /// in `opportunities` and the index in `ThreadingOpportunity::chain`.
-    involving_tos: IndexVec<BasicBlock, Vec<(usize, usize)>>,
-    /// Cache the number of predecessors for each block, as we clear the basic block cache..
-    predecessors: IndexVec<BasicBlock, usize>,
+/// Propagate fulfilled conditions forward in the CFG to reduce the amount of duplication.
+#[instrument(level = "debug", skip(body, entry_states))]
+fn simplify_conditions(body: &Body<'_>, entry_states: &mut IndexVec<BasicBlock, ConditionSet>) {
+    let basic_blocks = &body.basic_blocks;
+    let reverse_postorder = basic_blocks.reverse_postorder();
+
+    // Start by computing the number of *incoming edges* for each block.
+    // We do not use the cached `basic_blocks.predecessors` as we only want reachable predecessors.
+    let mut predecessors = IndexVec::from_elem(0, &entry_states);
+    predecessors[START_BLOCK] = 1; // Account for the implicit entry edge.
+    for &bb in reverse_postorder {
+        let term = basic_blocks[bb].terminator();
+        for s in term.successors() {
+            predecessors[s] += 1;
+        }
+    }
+
+    // Compute the number of edges into each block that carry each condition.
+    let mut fulfill_in_pred_count = IndexVec::from_fn_n(
+        |bb: BasicBlock| IndexVec::from_elem_n(0, entry_states[bb].targets.len()),
+        entry_states.len(),
+    );
+
+    // By traversing in RPO, we increase the likelihood of visiting predecessors before successors.
+    for &bb in reverse_postorder {
+        let preds = predecessors[bb];
+        trace!(?bb, ?preds);
+
+        // All the incoming edges to this block have been removed. Just skip visiting it.
+        if preds == 0 {
+            continue;
+        }
+
+        let state = &mut entry_states[bb];
+        trace!(?state);
+
+        // Conditions that are fulfilled in all the predecessors are fulfilled in `bb`.
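+        // E.g. if `bb` has 3 reachable incoming edges and a condition is fulfilled on all 3 of
+        // them, that condition holds on entry to `bb`.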
+        trace!(fulfilled_count = ?fulfill_in_pred_count[bb]);
+        for (condition, &cond_preds) in fulfill_in_pred_count[bb].iter_enumerated() {
+            if cond_preds == preds {
+                trace!(?condition);
+                state.fulfilled.push(condition);
+            }
+        }
+
+        // We want to count how many times each condition is fulfilled,
+        // so ensure we are not counting the same edge twice.
+        let mut targets: Vec<_> = state
+            .fulfilled
+            .iter()
+            .flat_map(|&index| state.targets[index].iter().copied())
+            .collect();
+        targets.sort();
+        targets.dedup();
+        trace!(?targets);
+
+        // We may modify the set of successors by applying edges, so track them here.
+        let mut successors = basic_blocks[bb].terminator().successors().collect::<Vec<_>>();
+
+        targets.reverse();
+        while let Some(target) = targets.pop() {
+            match target {
+                EdgeEffect::Goto { target } => {
+                    // We update the count of predecessors. If `target` or any successor has not
+                    // been processed yet, this increases the likelihood that we find something relevant.
+                    predecessors[target] += 1;
+                    for &s in successors.iter() {
+                        predecessors[s] -= 1;
+                    }
+                    // Only process edges that still exist.
+                    targets.retain(|t| t.block() == target);
+                    successors.clear();
+                    successors.push(target);
+                }
+                EdgeEffect::Chain { succ_block, succ_condition } => {
+                    // `predecessors` is the number of incoming *edges* in each block.
+                    // Count the number of edges that apply `succ_condition` into `succ_block`.
+                    let count = successors.iter().filter(|&&s| s == succ_block).count();
+                    fulfill_in_pred_count[succ_block][succ_condition] += count;
+                }
+            }
+        }
+    }
 }
 
-impl OpportunitySet {
-    fn new(body: &Body<'_>, opportunities: Vec<ThreadingOpportunity>) -> OpportunitySet {
-        let mut involving_tos = IndexVec::from_elem(Vec::new(), &body.basic_blocks);
-        for (index, to) in opportunities.iter().enumerate() {
-            for (ibb, &bb) in to.chain.iter().enumerate() {
-                involving_tos[bb].push((index, ibb));
+#[instrument(level = "debug", skip(tcx, typing_env, body, entry_states))]
+fn remove_costly_conditions<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    typing_env: ty::TypingEnv<'tcx>,
+    body: &Body<'tcx>,
+    entry_states: &mut IndexVec<BasicBlock, ConditionSet>,
+) {
+    let basic_blocks = &body.basic_blocks;
+
+    let mut costs = IndexVec::from_elem(None, basic_blocks);
+    let mut cost = |bb: BasicBlock| -> u8 {
+        let c = *costs[bb].get_or_insert_with(|| {
+            let bbdata = &basic_blocks[bb];
+            let mut cost = CostChecker::new(tcx, typing_env, None, body);
+            cost.visit_basic_block_data(bb, bbdata);
+            cost.cost().try_into().unwrap_or(MAX_COST)
+        });
+        trace!("cost[{bb:?}] = {c}");
+        c
+    };
+
+    // Initialize costs with `MAX_COST`: if we have a cycle, the cyclic `bb` has an infinite cost.
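+    // (Blocks are visited in postorder below, so on a back edge the successor's cost has not been
+    // computed yet and defaults to `MAX_COST`, which makes chains through the back edge too costly
+    // to keep.)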
+    let mut condition_cost = IndexVec::from_fn_n(
+        |bb: BasicBlock| IndexVec::from_elem_n(MAX_COST, entry_states[bb].targets.len()),
+        entry_states.len(),
+    );
+
+    let reverse_postorder = basic_blocks.reverse_postorder();
+
+    for &bb in reverse_postorder.iter().rev() {
+        let state = &entry_states[bb];
+        trace!(?bb, ?state);
+
+        let mut current_costs = IndexVec::from_elem(0u8, &state.targets);
+
+        for (condition, targets) in state.targets.iter_enumerated() {
+            for &target in targets {
+                match target {
+                    // A `Goto` has cost 0.
+                    EdgeEffect::Goto { .. } => {}
+                    // Chaining into an already-fulfilled condition is a nop.
+                    EdgeEffect::Chain { succ_block, succ_condition }
+                        if entry_states[succ_block].fulfilled.contains(&succ_condition) => {}
+                    // When chaining, use `cost[succ_block][succ_condition] + cost(succ_block)`.
+                    EdgeEffect::Chain { succ_block, succ_condition } => {
+                        // Cost associated with duplicating `succ_block`.
+                        let duplication_cost = cost(succ_block);
+                        // Cost associated with the rest of the chain.
+                        let target_cost =
+                            *condition_cost[succ_block].get(succ_condition).unwrap_or(&MAX_COST);
+                        let cost = current_costs[condition]
+                            .saturating_add(duplication_cost)
+                            .saturating_add(target_cost);
+                        trace!(?condition, ?succ_block, ?duplication_cost, ?target_cost);
+                        current_costs[condition] = cost;
+                    }
+                }
             }
-            involving_tos[to.target].push((index, to.chain.len()));
         }
-        let predecessors = predecessor_count(body);
-        OpportunitySet { opportunities, involving_tos, predecessors }
+
+        trace!("condition_cost[{bb:?}] = {:?}", current_costs);
+        condition_cost[bb] = current_costs;
+    }
+
+    trace!(?condition_cost);
+
+    for &bb in reverse_postorder {
+        for (index, targets) in entry_states[bb].targets.iter_enumerated_mut() {
+            if condition_cost[bb][index] >= MAX_COST {
+                trace!(?bb, ?index, ?targets, c = ?condition_cost[bb][index], "remove");
+                targets.clear()
+            }
+        }
+    }
+}
+
+struct OpportunitySet<'a, 'tcx> {
+    basic_blocks: &'a mut IndexVec<BasicBlock, BasicBlockData<'tcx>>,
+    entry_states: IndexVec<BasicBlock, ConditionSet>,
+    /// Cache duplicated blocks. When a basic block `bb` is cloned to fulfill a condition `c`,
+    /// record the resulting block as the target of the `bb with c` edge, so later edges reuse it.
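+    /// For instance (hypothetical numbering): if `bb2` was cloned into `bb7` to fulfill
+    /// condition `3`, then `duplicates[(bb2, 3)] == bb7`, and any later chain into `(bb2, 3)`
+    /// reuses `bb7` instead of cloning again.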
+    duplicates: FxHashMap<(BasicBlock, ConditionIndex), BasicBlock>,
+}
+
+impl<'a, 'tcx> OpportunitySet<'a, 'tcx> {
+    fn new(
+        body: &'a mut Body<'tcx>,
+        mut entry_states: IndexVec<BasicBlock, ConditionSet>,
+    ) -> Option<OpportunitySet<'a, 'tcx>> {
+        trace!(def_id = ?body.source.def_id(), "apply");
+
+        if entry_states.iter().all(|state| state.fulfilled.is_empty()) {
+            return None;
+        }
+
+        // Free some memory, because we will need to clone condition sets.
+        for state in entry_states.iter_mut() {
+            state.active = Default::default();
+        }
+        let duplicates = Default::default();
+        let basic_blocks = body.basic_blocks.as_mut();
+        Some(OpportunitySet { basic_blocks, entry_states, duplicates })
     }
 
     /// Apply the opportunities on the graph.
-    fn apply(&mut self, body: &mut Body<'_>) {
-        for i in 0..self.opportunities.len() {
-            self.apply_once(i, body);
-        }
-    }
+    #[instrument(level = "debug", skip(self))]
+    fn apply(mut self) {
+        let mut worklist = Vec::with_capacity(self.basic_blocks.len());
+        worklist.push(START_BLOCK);
 
-    #[instrument(level = "trace", skip(self, body))]
-    fn apply_once(&mut self, index: usize, body: &mut Body<'_>) {
-        debug!(?self.predecessors);
-        debug!(?self.involving_tos);
+        // Use a `GrowableBitSet` and not a `DenseBitSet` as we are adding blocks.
+        let mut visited = GrowableBitSet::with_capacity(self.basic_blocks.len());
 
-        // Check that `predecessors` satisfies its invariant.
-        debug_assert_eq!(self.predecessors, predecessor_count(body));
-
-        // Remove the TO from the vector to allow modifying the other ones later.
-        let op = &mut self.opportunities[index];
-        debug!(?op);
-        let op_chain = std::mem::take(&mut op.chain);
-        let op_target = op.target;
-        debug_assert_eq!(op_chain.len(), op_chain.iter().collect::<FxHashSet<_>>().len());
-
-        let Some((current, chain)) = op_chain.split_first() else { return };
-        let basic_blocks = body.basic_blocks.as_mut();
-
-        // Invariant: the control-flow is well-formed at the end of each iteration.
-        let mut current = *current;
-        for &succ in chain {
-            debug!(?current, ?succ);
-
-            // `succ` must be a successor of `current`. If it is not, this means this TO is not
-            // satisfiable and a previous TO erased this edge, so we bail out.
-            if !basic_blocks[current].terminator().successors().any(|s| s == succ) {
-                debug!("impossible");
-                return;
-            }
-
-            // Fast path: `succ` is only used once, so we can reuse it directly.
-            if self.predecessors[succ] == 1 {
-                debug!("single");
-                current = succ;
+        while let Some(bb) = worklist.pop() {
+            if !visited.insert(bb) {
                 continue;
             }
 
-            let new_succ = basic_blocks.push(basic_blocks[succ].clone());
-            debug!(?new_succ);
+            self.apply_once(bb);
 
-            // Replace `succ` by `new_succ` where it appears.
-            let mut num_edges = 0;
-            basic_blocks[current].terminator_mut().successors_mut(|s| {
-                if *s == succ {
-                    *s = new_succ;
-                    num_edges += 1;
-                }
-            });
-
-            // Update predecessors with the new block.
-            let _new_succ = self.predecessors.push(num_edges);
-            debug_assert_eq!(new_succ, _new_succ);
-            self.predecessors[succ] -= num_edges;
-            self.update_predecessor_count(basic_blocks[new_succ].terminator(), Update::Incr);
-
-            // Replace the `current -> succ` edge by `current -> new_succ` in all the following
-            // TOs. This is necessary to avoid trying to thread through a non-existing edge. We
-            // use `involving_tos` here to avoid traversing the full set of TOs on each iteration.
-            let mut new_involved = Vec::new();
-            for &(to_index, in_to_index) in &self.involving_tos[current] {
-                // That TO has already been applied, do nothing.
-                if to_index <= index {
-                    continue;
-                }
-
-                let other_to = &mut self.opportunities[to_index];
-                if other_to.chain.get(in_to_index) != Some(&current) {
-                    continue;
-                }
-                let s = other_to.chain.get_mut(in_to_index + 1).unwrap_or(&mut other_to.target);
-                if *s == succ {
-                    // `other_to` references the `current -> succ` edge, so replace `succ`.
-                    *s = new_succ;
-                    new_involved.push((to_index, in_to_index + 1));
-                }
-            }
-
-            // The TOs that we just updated now reference `new_succ`. Update `involving_tos`
-            // in case we need to duplicate an edge starting at `new_succ` later.
-            let _new_succ = self.involving_tos.push(new_involved);
-            debug_assert_eq!(new_succ, _new_succ);
-
-            current = new_succ;
-        }
-
-        let current = &mut basic_blocks[current];
-        self.update_predecessor_count(current.terminator(), Update::Decr);
-        current.terminator_mut().kind = TerminatorKind::Goto { target: op_target };
-        self.predecessors[op_target] += 1;
-    }
-
-    fn update_predecessor_count(&mut self, terminator: &Terminator<'_>, incr: Update) {
-        match incr {
-            Update::Incr => {
-                for s in terminator.successors() {
-                    self.predecessors[s] += 1;
-                }
-            }
-            Update::Decr => {
-                for s in terminator.successors() {
-                    self.predecessors[s] -= 1;
-                }
-            }
+            // `apply_once` may have modified the terminator of `bb`.
+            // Only visit actual successors.
+            worklist.extend(self.basic_blocks[bb].terminator().successors());
         }
     }
-}
 
-fn predecessor_count(body: &Body<'_>) -> IndexVec<BasicBlock, usize> {
-    let mut predecessors: IndexVec<_, _> =
-        body.basic_blocks.predecessors().iter().map(|ps| ps.len()).collect();
-    predecessors[START_BLOCK] += 1; // Account for the implicit entry edge.
-    predecessors
-}
+    /// Apply the opportunities on `bb`.
+    #[instrument(level = "debug", skip(self))]
+    fn apply_once(&mut self, bb: BasicBlock) {
+        let state = &mut self.entry_states[bb];
+        trace!(?state);
 
-enum Update {
-    Incr,
-    Decr,
+        // We are modifying `bb` in place. Once an `EdgeEffect` has been applied,
+        // it does not need to be applied again.
+        let mut targets: Vec<_> = state
+            .fulfilled
+            .iter()
+            .flat_map(|&index| std::mem::take(&mut state.targets[index]))
+            .collect();
+        targets.sort();
+        targets.dedup();
+        trace!(?targets);
+
+        // Use a while-pop to allow modifying `targets` from inside the loop.
+        targets.reverse();
+        while let Some(target) = targets.pop() {
+            debug!(?target);
+            trace!(term = ?self.basic_blocks[bb].terminator().kind);
+
+            // By construction, `target.block()` is a successor of `bb`.
+            // When applying targets, we may change the set of successors.
+            // The match below updates the set of targets for consistency.
+            debug_assert!(
+                self.basic_blocks[bb].terminator().successors().contains(&target.block()),
+                "missing {target:?} in successors for {bb:?}, term={:?}",
+                self.basic_blocks[bb].terminator(),
+            );
+
+            match target {
+                EdgeEffect::Goto { target } => {
+                    self.apply_goto(bb, target);
+
+                    // We now have `target` as the single successor. Drop all other target blocks.
+                    targets.retain(|t| t.block() == target);
+                    // Also do this on targets that may be applied by a duplicate of `bb`.
+                    for ts in self.entry_states[bb].targets.iter_mut() {
+                        ts.retain(|t| t.block() == target);
+                    }
+                }
+                EdgeEffect::Chain { succ_block, succ_condition } => {
+                    let new_succ_block = self.apply_chain(bb, succ_block, succ_condition);
+
+                    // We have a new name for `succ_block`; ensure it is applied everywhere.
+                    if let Some(new_succ_block) = new_succ_block {
+                        for t in targets.iter_mut() {
+                            t.replace_block(succ_block, new_succ_block)
+                        }
+                        // Also do this on targets that may be applied by a duplicate of `bb`.
+                        for t in
+                            self.entry_states[bb].targets.iter_mut().flat_map(|ts| ts.iter_mut())
+                        {
+                            t.replace_block(succ_block, new_succ_block)
+                        }
+                    }
+                }
+            }
+
+            trace!(post_term = ?self.basic_blocks[bb].terminator().kind);
+        }
+    }
+
+    #[instrument(level = "debug", skip(self))]
+    fn apply_goto(&mut self, bb: BasicBlock, target: BasicBlock) {
+        self.basic_blocks[bb].terminator_mut().kind = TerminatorKind::Goto { target };
+    }
+
+    #[instrument(level = "debug", skip(self), ret)]
+    fn apply_chain(
+        &mut self,
+        bb: BasicBlock,
+        target: BasicBlock,
+        condition: ConditionIndex,
+    ) -> Option<BasicBlock> {
+        if self.entry_states[target].fulfilled.contains(&condition) {
+            // `target` already fulfills `condition`, so we do not need to thread anything.
+            trace!("fulfilled");
+            return None;
+        }
+
+        // We may be tempted to modify `target` in place to avoid a clone. This is wrong:
+        // we may still have edges from other blocks to `target` that have not been created yet,
+        // for instance because we may be threading an edge coming from `bb`, or because
+        // `target` is a block duplicate for which we may still create predecessors.
+
+        let new_target = *self.duplicates.entry((target, condition)).or_insert_with(|| {
+            // If we already have a duplicate of `target` which fulfills `condition`, reuse it.
+            // Otherwise, clone a new bb for that purpose.
+            let new_target = self.basic_blocks.push(self.basic_blocks[target].clone());
+            trace!(?target, ?new_target, ?condition, "clone");
+
+            // By definition, `new_target` fulfills the same conditions as `target`, with
+            // `condition` added.
+            let mut condition_set = self.entry_states[target].clone();
+            condition_set.fulfilled.push(condition);
+            let _new_target = self.entry_states.push(condition_set);
+            debug_assert_eq!(new_target, _new_target);
+
+            new_target
+        });
+        trace!(?target, ?new_target, ?condition, "reuse");
+
+        // Replace `target` by `new_target` where it appears.
+        // This redirects every `bb -> target` edge to `new_target`.
+        self.basic_blocks[bb].terminator_mut().successors_mut(|s| {
+            if *s == target {
+                *s = new_target;
+            }
+        });
+
+        Some(new_target)
+    }
 }
diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl
index 7055e60..87d1173 100644
--- a/compiler/rustc_parse/messages.ftl
+++ b/compiler/rustc_parse/messages.ftl
@@ -347,6 +347,7 @@
 parse_frontmatter_length_mismatch = frontmatter close does not match the opening
     .label_opening = the opening here has {$len_opening} dashes...
     .label_close = ...while the close has {$len_close} dashes
+parse_frontmatter_too_many_dashes = too many `-` symbols: frontmatter openings may be delimited by up to 255 `-` symbols, but found {$len_opening}
 parse_frontmatter_unclosed = unclosed frontmatter
     .note = frontmatter opening here was not closed
 
@@ -512,7 +513,7 @@
     lifetimes cannot use keyword names
 
 parse_kw_bad_case = keyword `{$kw}` is written in the wrong case
-    .suggestion = write it in the correct case
+    .suggestion = write it in {$case}
 
 parse_label_inner_attr_does_not_annotate_this = the inner attribute doesn't annotate this {$item}
 parse_label_unexpected_token = unexpected token
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 62a333f..698d8f7 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1,14 +1,15 @@
 // ignore-tidy-filelength
 
 use std::borrow::Cow;
+use std::path::PathBuf;
 
 use rustc_ast::token::Token;
 use rustc_ast::util::parser::ExprPrecedence;
 use rustc_ast::{Path, Visibility};
 use rustc_errors::codes::*;
 use rustc_errors::{
-    Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic,
-    SuggestionStyle,
+    Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, IntoDiagArg,
+    Level, Subdiagnostic, SuggestionStyle,
 };
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
 use rustc_session::errors::ExprParenthesesNeeded;
@@ -823,6 +824,12 @@ pub(crate) struct FrontmatterLengthMismatch {
 }
 
 #[derive(Diagnostic)]
+#[diag(parse_frontmatter_too_many_dashes)]
+pub(crate) struct FrontmatterTooManyDashes {
+    pub len_opening: usize,
+}
+
+#[derive(Diagnostic)]
 #[diag(parse_leading_plus_not_supported)]
 pub(crate) struct LeadingPlusNotSupported {
     #[primary_span]
@@ -3335,6 +3342,24 @@ pub(crate) struct KwBadCase<'a> {
     #[suggestion(code = "{kw}", style = "verbose", applicability = "machine-applicable")]
     pub span: Span,
     pub kw: &'a str,
+    pub case: Case,
+}
+
+pub(crate) enum Case {
+    Upper,
+    Lower,
+    Mixed,
+}
+
+impl IntoDiagArg for Case {
+    fn into_diag_arg(self, path: &mut Option<PathBuf>) -> DiagArgValue {
+        match self {
+            Case::Upper => "uppercase",
+            Case::Lower => "lowercase",
+            Case::Mixed => "the correct case",
+        }
+        .into_diag_arg(path)
+    }
 }
 
 #[derive(Diagnostic)]
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 51019db..c62c8ac 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -665,6 +665,11 @@ fn validate_frontmatter(
             });
         }
 
+        // Only up to 255 `-`s are allowed in code fences
+        if u8::try_from(len_opening).is_err() {
+            self.dcx().emit_err(errors::FrontmatterTooManyDashes { len_opening });
+        }
+
         if !rest.trim_matches(is_horizontal_whitespace).is_empty() {
             let span = self.mk_sp(last_line_start_pos, self.pos);
             self.dcx().emit_err(errors::FrontmatterExtraCharactersAfterClose { span });
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 14a738f..8577ea40 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -606,7 +606,20 @@ fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
             // Do an ASCII case-insensitive match, because all keywords are ASCII.
             && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
         {
-            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
+            let kw = exp.kw.as_str();
+            let is_upper = kw.chars().all(char::is_uppercase);
+            let is_lower = kw.chars().all(char::is_lowercase);
+
+            let case = match (is_upper, is_lower) {
+                (true, true) => {
+                    unreachable!("keyword that is both fully upper- and fully lowercase")
+                }
+                (true, false) => errors::Case::Upper,
+                (false, true) => errors::Case::Lower,
+                (false, false) => errors::Case::Mixed,
+            };
+
+            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw, case });
             self.bump();
             true
         } else {
diff --git a/compiler/rustc_passes/src/stability.rs b/compiler/rustc_passes/src/stability.rs
index b7e6e2d..39830db 100644
--- a/compiler/rustc_passes/src/stability.rs
+++ b/compiler/rustc_passes/src/stability.rs
@@ -54,7 +54,7 @@ fn inherit_const_stability(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
     match def_kind {
         DefKind::AssocFn | DefKind::AssocTy | DefKind::AssocConst => {
             match tcx.def_kind(tcx.local_parent(def_id)) {
-                DefKind::Impl { of_trait: true } => true,
+                DefKind::Impl { .. } => true,
                 _ => false,
             }
         }
diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs
index fe299a6..1cdb4cc 100644
--- a/compiler/rustc_resolve/src/diagnostics.rs
+++ b/compiler/rustc_resolve/src/diagnostics.rs
@@ -1902,7 +1902,7 @@ pub(crate) fn add_typo_suggestion(
             if span.overlaps(def_span) {
                 // Don't suggest typo suggestion for itself like in the following:
                 // error[E0423]: expected function, tuple struct or tuple variant, found struct `X`
-                //   --> $DIR/issue-64792-bad-unicode-ctor.rs:3:14
+                //   --> $DIR/unicode-string-literal-syntax-error-64792.rs:4:14
                 //    |
                 // LL | struct X {}
                 //    | ----------- `X` defined here
diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs
index df620dd..b689765 100644
--- a/compiler/rustc_resolve/src/late.rs
+++ b/compiler/rustc_resolve/src/late.rs
@@ -3689,7 +3689,11 @@ fn resolve_delegation(&mut self, delegation: &'ast Delegation) {
             let ident = Ident::new(kw::SelfLower, span.normalize_to_macro_rules());
             let res = Res::Local(delegation.id);
             this.innermost_rib_bindings(ValueNS).insert(ident, res);
-            this.visit_block(body);
+
+            // As we lower the `target_expr_template` body to a function body, we need a label rib (#148889).
+            this.with_label_rib(RibKind::FnOrCoroutine, |this| {
+                this.visit_block(body);
+            });
         });
     }
 
diff --git a/compiler/rustc_target/src/spec/base/arm_none.rs b/compiler/rustc_target/src/spec/base/arm_none.rs
new file mode 100644
index 0000000..feb69bd
--- /dev/null
+++ b/compiler/rustc_target/src/spec/base/arm_none.rs
@@ -0,0 +1,32 @@
+// These are the baseline settings for 32-bit bare-metal Arm targets using the EABI or EABIHF ABI.
+
+use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, PanicStrategy, RelocModel, TargetOptions};
+
+pub(crate) fn opts() -> TargetOptions {
+    // See rust-lang/rfcs#1645 for a discussion about these defaults
+    TargetOptions {
+        linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
+        // In most cases, LLD is good enough
+        linker: Some("rust-lld".into()),
+        // Because these devices have very few resources, having an unwinder is too onerous, so
+        // we default to "abort"; the "unwind" strategy is very rarely used.
+        panic_strategy: PanicStrategy::Abort,
+        // Similarly, one almost never wants to use relocatable code because of the extra costs
+        // it involves.
+        relocation_model: RelocModel::Static,
+        // When this section is added, a volatile load of its start address is also generated.
+        // This volatile load is a footgun as it can end up loading an invalid memory address,
+        // depending on how the user set up their linker scripts. This section adds pretty
+        // printers for things like std::Vec, which are not used much in a no-std context, so
+        // it's best to leave it out until we figure out a way to add the pretty printers
+        // without requiring a volatile load, cf. rust-lang/rust#44993.
+        emit_debug_gdb_scripts: false,
+        // LLVM is eager to trash the link register when calling `noreturn` functions, which
+        // breaks debugging. Preserve LR by default to prevent that from happening.
+        frame_pointer: FramePointer::Always,
+        // ARM supports multiple ABIs for enums; the Linux one matches the default of 32 here,
+        // but any arm-none or thumb-none target defaults to 8 on GCC.
+        c_enum_min_bits: Some(8),
+        ..Default::default()
+    }
+}
diff --git a/compiler/rustc_target/src/spec/base/mod.rs b/compiler/rustc_target/src/spec/base/mod.rs
index ca1c964..9e7ff62 100644
--- a/compiler/rustc_target/src/spec/base/mod.rs
+++ b/compiler/rustc_target/src/spec/base/mod.rs
@@ -1,6 +1,7 @@
 pub(crate) mod aix;
 pub(crate) mod android;
 pub mod apple;
+pub(crate) mod arm_none;
 pub(crate) mod avr;
 pub(crate) mod bpf;
 pub(crate) mod cygwin;
@@ -31,7 +32,6 @@
 pub(crate) mod solaris;
 pub(crate) mod solid;
 pub(crate) mod teeos;
-pub(crate) mod thumb;
 pub(crate) mod uefi_msvc;
 pub(crate) mod unikraft_linux_musl;
 pub(crate) mod vxworks;
diff --git a/compiler/rustc_target/src/spec/base/thumb.rs b/compiler/rustc_target/src/spec/base/thumb.rs
deleted file mode 100644
index 03ec679..0000000
--- a/compiler/rustc_target/src/spec/base/thumb.rs
+++ /dev/null
@@ -1,59 +0,0 @@
-// These `thumbv*` targets cover the ARM Cortex-M family of processors which are widely used in
-// microcontrollers. Namely, all these processors:
-//
-// - Cortex-M0
-// - Cortex-M0+
-// - Cortex-M1
-// - Cortex-M3
-// - Cortex-M4(F)
-// - Cortex-M7(F)
-// - Cortex-M23
-// - Cortex-M33
-//
-// We have opted for these instead of one target per processor (e.g., `cortex-m0`, `cortex-m3`,
-// etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost
-// nonexistent from the POV of codegen so it doesn't make sense to have separate targets for them.
-// And if differences exist between two processors under the same target, rustc flags can be used to
-// optimize for one processor or the other.
-//
-// Also, we have not chosen a single target (`arm-none-eabi`) like GCC does because this makes
-// difficult to integrate Rust code and C code. Targeting the Cortex-M4 requires different gcc flags
-// than the ones you would use for the Cortex-M0 and with a single target it'd be impossible to
-// differentiate one processor from the other.
-//
-// About arm vs thumb in the name. The Cortex-M devices only support the Thumb instruction set,
-// which is more compact (higher code density), and not the ARM instruction set. That's why LLVM
-// triples use thumb instead of arm. We follow suit because having thumb in the name let us
-// differentiate these targets from our other `arm(v7)-*-*-gnueabi(hf)` targets in the context of
-// build scripts / gcc flags.
-
-use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, PanicStrategy, RelocModel, TargetOptions};
-
-pub(crate) fn opts() -> TargetOptions {
-    // See rust-lang/rfcs#1645 for a discussion about these defaults
-    TargetOptions {
-        linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
-        // In most cases, LLD is good enough
-        linker: Some("rust-lld".into()),
-        // Because these devices have very little resources having an unwinder is too onerous so we
-        // default to "abort" because the "unwind" strategy is very rare.
-        panic_strategy: PanicStrategy::Abort,
-        // Similarly, one almost always never wants to use relocatable code because of the extra
-        // costs it involves.
-        relocation_model: RelocModel::Static,
-        // When this section is added a volatile load to its start address is also generated. This
-        // volatile load is a footgun as it can end up loading an invalid memory address, depending
-        // on how the user set up their linker scripts. This section adds pretty printer for stuff
-        // like std::Vec, which is not that used in no-std context, so it's best to left it out
-        // until we figure a way to add the pretty printers without requiring a volatile load cf.
-        // rust-lang/rust#44993.
-        emit_debug_gdb_scripts: false,
-        // LLVM is eager to trash the link register when calling `noreturn` functions, which
-        // breaks debugging. Preserve LR by default to prevent that from happening.
-        frame_pointer: FramePointer::Always,
-        // ARM supports multiple ABIs for enums, the linux one matches the default of 32 here
-        // but any arm-none or thumb-none target will be defaulted to 8 on GCC.
-        c_enum_min_bits: Some(8),
-        ..Default::default()
-    }
-}
diff --git a/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs b/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs
index 129b639..fc66a2f 100644
--- a/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs
@@ -9,10 +9,7 @@
 //! The default link script is very likely wrong, so you should use
 //! `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script.
 
-use crate::spec::{
-    Abi, Arch, Cc, FloatAbi, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetMetadata,
-    TargetOptions, cvs,
-};
+use crate::spec::{Abi, Arch, FloatAbi, Target, TargetMetadata, TargetOptions, base, cvs};
 
 pub(crate) fn target() -> Target {
     Target {
@@ -25,35 +22,16 @@ pub(crate) fn target() -> Target {
         },
         pointer_width: 32,
         arch: Arch::Arm,
-        /* Data layout args are '-' separated:
-         * little endian
-         * stack is 64-bit aligned (EABI)
-         * pointers are 32-bit
-         * i64 must be 64-bit aligned (EABI)
-         * mangle names with ELF style
-         * native integers are 32-bit
-         * All other elements are default
-         */
         data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
         options: TargetOptions {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
-            linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
-            linker: Some("rust-lld".into()),
             asm_args: cvs!["-mthumb-interwork", "-march=armv4t", "-mlittle-endian",],
-            // Force-enable 32-bit atomics, which allows the use of atomic load/store only.
-            // The resulting atomics are ABI incompatible with atomics backed by libatomic.
-            features: "+soft-float,+strict-align,+atomics-32".into(),
-            main_needs_argc_argv: false,
+            features: "+soft-float,+strict-align".into(),
             atomic_cas: false,
+            max_atomic_width: Some(0),
             has_thumb_interworking: true,
-            relocation_model: RelocModel::Static,
-            panic_strategy: PanicStrategy::Abort,
-            // From thumb_base, rust-lang/rust#44993.
-            emit_debug_gdb_scripts: false,
-            // From thumb_base, GCC gives enums a minimum of 8 bits on no-os targets.
-            c_enum_min_bits: Some(8),
-            ..Default::default()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs b/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs
index 7cd571b..8089e9a 100644
--- a/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs
@@ -1,8 +1,6 @@
 //! Targets the ARMv5TE, with code as `a32` code by default.
 
-use crate::spec::{
-    Abi, Arch, FloatAbi, FramePointer, Target, TargetMetadata, TargetOptions, base, cvs,
-};
+use crate::spec::{Abi, Arch, FloatAbi, Target, TargetMetadata, TargetOptions, base, cvs};
 
 pub(crate) fn target() -> Target {
     Target {
@@ -15,36 +13,16 @@ pub(crate) fn target() -> Target {
         },
         pointer_width: 32,
         arch: Arch::Arm,
-        /* Data layout args are '-' separated:
-         * little endian
-         * stack is 64-bit aligned (EABI)
-         * pointers are 32-bit
-         * i64 must be 64-bit aligned (EABI)
-         * mangle names with ELF style
-         * native integers are 32-bit
-         * All other elements are default
-         */
         data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
-
         options: TargetOptions {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
-            // extra args passed to the external assembler (assuming `arm-none-eabi-as`):
-            // * activate t32/a32 interworking
-            // * use arch ARMv5TE
-            // * use little-endian
             asm_args: cvs!["-mthumb-interwork", "-march=armv5te", "-mlittle-endian",],
-            // minimum extra features, these cannot be disabled via -C
-            // Also force-enable 32-bit atomics, which allows the use of atomic load/store only.
-            // The resulting atomics are ABI incompatible with atomics backed by libatomic.
-            features: "+soft-float,+strict-align,+atomics-32".into(),
-            frame_pointer: FramePointer::MayOmit,
-            main_needs_argc_argv: false,
-            // don't have atomic compare-and-swap
+            features: "+soft-float,+strict-align".into(),
             atomic_cas: false,
+            max_atomic_width: Some(0),
             has_thumb_interworking: true,
-
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs
index 0498c55..50eccbe 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs
@@ -9,10 +9,7 @@
 //! The default link script is very likely wrong, so you should use
 //! `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script.
 
-use crate::spec::{
-    Abi, Arch, FloatAbi, FramePointer, PanicStrategy, RelocModel, Target, TargetMetadata,
-    TargetOptions, base, cvs,
-};
+use crate::spec::{Abi, Arch, FloatAbi, Target, TargetMetadata, TargetOptions, base, cvs};
 
 pub(crate) fn target() -> Target {
     Target {
@@ -25,44 +22,16 @@ pub(crate) fn target() -> Target {
         },
         pointer_width: 32,
         arch: Arch::Arm,
-        /* Data layout args are '-' separated:
-         * little endian
-         * stack is 64-bit aligned (EABI)
-         * pointers are 32-bit
-         * i64 must be 64-bit aligned (EABI)
-         * mangle names with ELF style
-         * native integers are 32-bit
-         * All other elements are default
-         */
         data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
         options: TargetOptions {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
-
-            // extra args passed to the external assembler (assuming `arm-none-eabi-as`):
-            // * activate t32/a32 interworking
-            // * use arch ARMv4T
-            // * use little-endian
             asm_args: cvs!["-mthumb-interwork", "-march=armv4t", "-mlittle-endian",],
-
-            // minimum extra features, these cannot be disabled via -C
-            // Also force-enable 32-bit atomics, which allows the use of atomic load/store only.
-            // The resulting atomics are ABI incompatible with atomics backed by libatomic.
-            features: "+soft-float,+strict-align,+atomics-32".into(),
-
-            panic_strategy: PanicStrategy::Abort,
-            relocation_model: RelocModel::Static,
-            // suggested from thumb_base, rust-lang/rust#44993.
-            emit_debug_gdb_scripts: false,
-            frame_pointer: FramePointer::MayOmit,
-
-            main_needs_argc_argv: false,
-
-            // don't have atomic compare-and-swap
+            features: "+soft-float,+strict-align".into(),
             atomic_cas: false,
+            max_atomic_width: Some(0),
             has_thumb_interworking: true,
-
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs
index a07e912..6acb03e 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs
@@ -1,8 +1,6 @@
 //! Targets the ARMv5TE, with code as `t32` code by default.
 
-use crate::spec::{
-    Abi, Arch, FloatAbi, FramePointer, Target, TargetMetadata, TargetOptions, base, cvs,
-};
+use crate::spec::{Abi, Arch, FloatAbi, Target, TargetMetadata, TargetOptions, base, cvs};
 
 pub(crate) fn target() -> Target {
     Target {
@@ -15,36 +13,16 @@ pub(crate) fn target() -> Target {
         },
         pointer_width: 32,
         arch: Arch::Arm,
-        /* Data layout args are '-' separated:
-         * little endian
-         * stack is 64-bit aligned (EABI)
-         * pointers are 32-bit
-         * i64 must be 64-bit aligned (EABI)
-         * mangle names with ELF style
-         * native integers are 32-bit
-         * All other elements are default
-         */
         data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
-
         options: TargetOptions {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
-            // extra args passed to the external assembler (assuming `arm-none-eabi-as`):
-            // * activate t32/a32 interworking
-            // * use arch ARMv5TE
-            // * use little-endian
             asm_args: cvs!["-mthumb-interwork", "-march=armv5te", "-mlittle-endian",],
-            // minimum extra features, these cannot be disabled via -C
-            // Also force-enable 32-bit atomics, which allows the use of atomic load/store only.
-            // The resulting atomics are ABI incompatible with atomics backed by libatomic.
-            features: "+soft-float,+strict-align,+atomics-32".into(),
-            frame_pointer: FramePointer::MayOmit,
-            main_needs_argc_argv: false,
-            // don't have atomic compare-and-swap
+            features: "+soft-float,+strict-align".into(),
             atomic_cas: false,
+            max_atomic_width: Some(0),
             has_thumb_interworking: true,
-
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs
index 836b2ff..cc81cb9 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs
@@ -26,7 +26,7 @@ pub(crate) fn target() -> Target {
             // There are no atomic CAS instructions available in the instruction set of the ARMv6-M
             // architecture
             atomic_cas: false,
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv6m_nuttx_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv6m_nuttx_eabi.rs
index fa0154d..8b58d2e 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv6m_nuttx_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv6m_nuttx_eabi.rs
@@ -25,7 +25,7 @@ pub(crate) fn target() -> Target {
             // The ARMv6-M doesn't support hardware atomic operations, use atomic builtins instead.
             features: "+strict-align".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabi.rs
index 7c1adc9..17d25a6 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabi.rs
@@ -27,7 +27,7 @@ pub(crate) fn target() -> Target {
             // Cortex-A7/A8/A9 with software floating point
             features: "+soft-float,-neon".into(),
             max_atomic_width: Some(64),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabihf.rs b/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabihf.rs
index 0e6d5b1..a4e1700 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabihf.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7a_nuttx_eabihf.rs
@@ -31,7 +31,7 @@ pub(crate) fn target() -> Target {
             // and NEON SIMD instructions
             features: "+vfp3,+neon".into(),
             max_atomic_width: Some(64),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs
index 9e0f09b..6e6975c 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs
@@ -28,7 +28,7 @@ pub(crate) fn target() -> Target {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs b/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs
index acc31cc..6a7fe14 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs
@@ -36,7 +36,7 @@ pub(crate) fn target() -> Target {
             // ARMv7-M Architecture Reference Manual - A2.5 The optional floating-point extension
             features: "+vfp4d16sp".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabi.rs
index 796206d..1017327 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabi.rs
@@ -30,7 +30,7 @@ pub(crate) fn target() -> Target {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabihf.rs b/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabihf.rs
index f85aef1..1f24155 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabihf.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7em_nuttx_eabihf.rs
@@ -38,7 +38,7 @@ pub(crate) fn target() -> Target {
             // ARMv7-M Architecture Reference Manual - A2.5 The optional floating-point extension
             features: "+vfp4d16sp".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs
index 8c5807b..8851f7b 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs
@@ -19,7 +19,7 @@ pub(crate) fn target() -> Target {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv7m_nuttx_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv7m_nuttx_eabi.rs
index 3d3d487..9f0261c 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv7m_nuttx_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv7m_nuttx_eabi.rs
@@ -21,7 +21,7 @@ pub(crate) fn target() -> Target {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs
index 298bad5..7426eb9 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs
@@ -22,7 +22,7 @@ pub(crate) fn target() -> Target {
             // with +strict-align.
             features: "+strict-align".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv8m_base_nuttx_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv8m_base_nuttx_eabi.rs
index 18bafc7..a74719b 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv8m_base_nuttx_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv8m_base_nuttx_eabi.rs
@@ -24,7 +24,7 @@ pub(crate) fn target() -> Target {
             // with +strict-align.
             features: "+strict-align".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs
index 90d7df7..540d4bd 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs
@@ -20,7 +20,7 @@ pub(crate) fn target() -> Target {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs b/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs
index debdb47..2287cce 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs
@@ -25,7 +25,7 @@ pub(crate) fn target() -> Target {
             // and 16 D registers.
             features: "+fp-armv8d16sp".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabi.rs b/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabi.rs
index f5039f1..ec10729 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabi.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabi.rs
@@ -22,7 +22,7 @@ pub(crate) fn target() -> Target {
             abi: Abi::Eabi,
             llvm_floatabi: Some(FloatAbi::Soft),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabihf.rs b/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabihf.rs
index 77d23a2..9ff924b 100644
--- a/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabihf.rs
+++ b/compiler/rustc_target/src/spec/targets/thumbv8m_main_nuttx_eabihf.rs
@@ -27,7 +27,7 @@ pub(crate) fn target() -> Target {
             // and 16 D registers.
             features: "+fp-armv8d16sp".into(),
             max_atomic_width: Some(32),
-            ..base::thumb::opts()
+            ..base::arm_none::opts()
         },
     }
 }
diff --git a/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs b/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs
index 8482c8a..eef8b87 100644
--- a/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs
+++ b/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs
@@ -177,7 +177,11 @@ fn find_best_leaf_obligation<'tcx>(
                 )
                 .break_value()
                 .ok_or(())
+                // Work around the fact that the cause in `Obligation` is ignored by folders, so
+                // that we can properly fudge the infer vars in the cause code.
+                .map(|o| (o.cause.clone(), o))
         })
+        .map(|(cause, o)| PredicateObligation { cause, ..o })
         .unwrap_or(obligation);
     deeply_normalize_for_diagnostics(infcx, obligation.param_env, obligation)
 }
diff --git a/compiler/rustc_ty_utils/src/layout/invariant.rs b/compiler/rustc_ty_utils/src/layout/invariant.rs
index d1484ae..01435f7 100644
--- a/compiler/rustc_ty_utils/src/layout/invariant.rs
+++ b/compiler/rustc_ty_utils/src/layout/invariant.rs
@@ -281,10 +281,16 @@ fn check_layout_abi<'tcx>(cx: &LayoutCx<'tcx>, layout: &TyAndLayout<'tcx>) {
                     }
 
                     // Ensure that for niche encoded tags the discriminant coincides with the variant index.
-                    assert_eq!(
-                        layout.ty.discriminant_for_variant(tcx, idx).unwrap().val,
-                        u128::from(idx.as_u32()),
-                    );
+                    let val = layout.ty.discriminant_for_variant(tcx, idx).unwrap().val;
+                    if val != u128::from(idx.as_u32()) {
+                        let adt_def = layout.ty.ty_adt_def().unwrap();
+                        cx.tcx().dcx().span_delayed_bug(
+                            cx.tcx().def_span(adt_def.did()),
+                            format!(
+                                "variant {idx:?} has discriminant {val:?} in niche-encoded type"
+                            ),
+                        );
+                    }
                 }
             }
             for variant in variants.iter() {
diff --git a/library/alloc/src/collections/btree/append.rs b/library/alloc/src/collections/btree/append.rs
index 091376d..66ea22e 100644
--- a/library/alloc/src/collections/btree/append.rs
+++ b/library/alloc/src/collections/btree/append.rs
@@ -104,9 +104,14 @@ impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
 {
     type Item = (K, V);
 
-    /// If two keys are equal, returns the key-value pair from the right source.
+    /// If two keys are equal, returns the key from the left and the value from the right.
     fn next(&mut self) -> Option<(K, V)> {
         let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
-        b_next.or(a_next)
+        match (a_next, b_next) {
+            (Some((a_k, _)), Some((_, b_v))) => Some((a_k, b_v)),
+            (Some(a), None) => Some(a),
+            (None, Some(b)) => Some(b),
+            (None, None) => None,
+        }
     }
 }
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 71ed8ca..79f355a 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -1181,6 +1181,10 @@ pub fn retain<F>(&mut self, mut f: F)
     ///
     /// If a key from `other` is already present in `self`, the respective
     /// value from `self` will be overwritten with the respective value from `other`.
+    /// Similar to [`insert`], though, the key is not overwritten,
+    /// which matters for types that can be `==` without being identical.
+    ///
+    /// [`insert`]: BTreeMap::insert
     ///
     /// # Examples
     ///
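For illustration, here is a minimal, self-contained sketch of the behaviour the added sentence describes, using a hypothetical `Key` type (not part of this patch) whose comparison ignores its `label` field: after `append`, the value comes from `other`, but the key kept in `self` is still the original one.

```rust
use std::cmp::Ordering;
use std::collections::BTreeMap;

// Hypothetical key type: comparison looks only at `id`, so two keys can
// compare equal while carrying different labels.
#[derive(Debug)]
struct Key {
    id: u32,
    label: &'static str,
}

impl PartialEq for Key {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for Key {}
impl PartialOrd for Key {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Key {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id.cmp(&other.id)
    }
}

fn main() {
    let mut left = BTreeMap::new();
    let mut right = BTreeMap::new();
    left.insert(Key { id: 0, label: "left" }, 1);
    right.insert(Key { id: 0, label: "right" }, 2);

    left.append(&mut right);

    let (key, value) = left.pop_first().unwrap();
    // The value was taken from `right`, but the key kept by the map is `left`'s.
    assert_eq!(value, 2);
    assert_eq!(key.label, "left");
}
```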
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index 79879d3..a61a2da 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -11,7 +11,7 @@
 use crate::rc::Rc;
 use crate::string::{String, ToString};
 use crate::testing::crash_test::{CrashTestDummy, Panic};
-use crate::testing::ord_chaos::{Cyclic3, Governed, Governor};
+use crate::testing::ord_chaos::{Cyclic3, Governed, Governor, IdBased};
 use crate::testing::rng::DeterministicRng;
 
 // Minimum number of elements to insert, to guarantee a tree with 2 levels,
@@ -2137,9 +2137,9 @@ fn test_append_drop_leak() {
     let mut left = BTreeMap::new();
     let mut right = BTreeMap::new();
     left.insert(a.spawn(Panic::Never), ());
-    left.insert(b.spawn(Panic::InDrop), ()); // first duplicate key, dropped during append
+    left.insert(b.spawn(Panic::Never), ());
     left.insert(c.spawn(Panic::Never), ());
-    right.insert(b.spawn(Panic::Never), ());
+    right.insert(b.spawn(Panic::InDrop), ()); // first duplicate key, dropped during append
     right.insert(c.spawn(Panic::Never), ());
 
     catch_unwind(move || left.append(&mut right)).unwrap_err();
@@ -2587,3 +2587,31 @@ fn cursor_peek_prev_agrees_with_cursor_mut() {
     let prev = cursor.peek_prev();
     assert_matches!(prev, Some((&3, _)));
 }
+
+#[test]
+fn test_id_based_insert() {
+    let mut lhs = BTreeMap::new();
+    let mut rhs = BTreeMap::new();
+
+    lhs.insert(IdBased { id: 0, name: "lhs_k".to_string() }, "lhs_v".to_string());
+    rhs.insert(IdBased { id: 0, name: "rhs_k".to_string() }, "rhs_v".to_string());
+
+    for (k, v) in rhs.into_iter() {
+        lhs.insert(k, v);
+    }
+
+    assert_eq!(lhs.pop_first().unwrap().0.name, "lhs_k".to_string());
+}
+
+#[test]
+fn test_id_based_append() {
+    let mut lhs = BTreeMap::new();
+    let mut rhs = BTreeMap::new();
+
+    lhs.insert(IdBased { id: 0, name: "lhs_k".to_string() }, "lhs_v".to_string());
+    rhs.insert(IdBased { id: 0, name: "rhs_k".to_string() }, "rhs_v".to_string());
+
+    lhs.append(&mut rhs);
+
+    assert_eq!(lhs.pop_first().unwrap().0.name, "lhs_k".to_string());
+}
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 3f391fe..bf73deb 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -127,7 +127,6 @@
 #![feature(layout_for_ptr)]
 #![feature(legacy_receiver_trait)]
 #![feature(local_waker)]
-#![feature(maybe_uninit_slice)]
 #![feature(maybe_uninit_uninit_array_transpose)]
 #![feature(panic_internals)]
 #![feature(pattern)]
diff --git a/library/alloctests/lib.rs b/library/alloctests/lib.rs
index 73c2567..f6c7105 100644
--- a/library/alloctests/lib.rs
+++ b/library/alloctests/lib.rs
@@ -28,7 +28,6 @@
 #![feature(inplace_iteration)]
 #![feature(iter_advance_by)]
 #![feature(iter_next_chunk)]
-#![feature(maybe_uninit_slice)]
 #![feature(maybe_uninit_uninit_array_transpose)]
 #![feature(ptr_alignment_type)]
 #![feature(ptr_internals)]
diff --git a/library/alloctests/testing/ord_chaos.rs b/library/alloctests/testing/ord_chaos.rs
index 55e1ae5..f90ba1c 100644
--- a/library/alloctests/testing/ord_chaos.rs
+++ b/library/alloctests/testing/ord_chaos.rs
@@ -2,6 +2,8 @@
 use std::cmp::Ordering::{self, *};
 use std::ptr;
 
+use crate::string::String;
+
 // Minimal type with an `Ord` implementation violating transitivity.
 #[derive(Debug)]
 pub(crate) enum Cyclic3 {
@@ -79,3 +81,31 @@ fn eq(&self, other: &Self) -> bool {
 }
 
 impl<T: Eq> Eq for Governed<'_, T> {}
+
+// Comparison is based only on the ID; the name is ignored.
+#[derive(Debug)]
+pub(crate) struct IdBased {
+    pub id: u32,
+    #[allow(dead_code)]
+    pub name: String,
+}
+
+impl PartialEq for IdBased {
+    fn eq(&self, other: &Self) -> bool {
+        self.id == other.id
+    }
+}
+
+impl Eq for IdBased {}
+
+impl PartialOrd for IdBased {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for IdBased {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.id.cmp(&other.id)
+    }
+}
diff --git a/library/core/src/clone/uninit.rs b/library/core/src/clone/uninit.rs
index 8d11850..b6e351f 100644
--- a/library/core/src/clone/uninit.rs
+++ b/library/core/src/clone/uninit.rs
@@ -114,16 +114,10 @@ fn push(&mut self, value: T) {
 impl<'a, T> Drop for InitializingSlice<'a, T> {
     #[cold] // will only be invoked on unwind
     fn drop(&mut self) {
-        let initialized_slice = ptr::slice_from_raw_parts_mut(
-            MaybeUninit::slice_as_mut_ptr(self.data),
-            self.initialized_len,
-        );
         // SAFETY:
         // * the pointer is valid because it was made from a mutable reference
         // * `initialized_len` counts the initialized elements as an invariant of this type,
         //   so each of the pointed-to elements is initialized and may be dropped.
-        unsafe {
-            ptr::drop_in_place::<[T]>(initialized_slice);
-        }
+        unsafe { self.data[..self.initialized_len].assume_init_drop() };
     }
 }
diff --git a/library/core/src/mem/maybe_uninit.rs b/library/core/src/mem/maybe_uninit.rs
index aee28c4..e00cf45 100644
--- a/library/core/src/mem/maybe_uninit.rs
+++ b/library/core/src/mem/maybe_uninit.rs
@@ -1047,7 +1047,7 @@ pub const fn as_mut_ptr(&mut self) -> *mut T {
     /// # Examples
     ///
     /// ```
-    /// #![feature(maybe_uninit_as_bytes, maybe_uninit_slice)]
+    /// #![feature(maybe_uninit_as_bytes)]
     /// use std::mem::MaybeUninit;
     ///
     /// let val = 0x12345678_i32;
@@ -1097,20 +1097,6 @@ pub const fn as_bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
             )
         }
     }
-
-    /// Gets a pointer to the first element of the array.
-    #[unstable(feature = "maybe_uninit_slice", issue = "63569")]
-    #[inline(always)]
-    pub const fn slice_as_ptr(this: &[MaybeUninit<T>]) -> *const T {
-        this.as_ptr() as *const T
-    }
-
-    /// Gets a mutable pointer to the first element of the array.
-    #[unstable(feature = "maybe_uninit_slice", issue = "63569")]
-    #[inline(always)]
-    pub const fn slice_as_mut_ptr(this: &mut [MaybeUninit<T>]) -> *mut T {
-        this.as_mut_ptr() as *mut T
-    }
 }
 
 impl<T> [MaybeUninit<T>] {
@@ -1410,7 +1396,7 @@ pub fn write_iter<I>(&mut self, it: I) -> (&mut [T], &mut [MaybeUninit<T>])
     /// # Examples
     ///
     /// ```
-    /// #![feature(maybe_uninit_as_bytes, maybe_uninit_slice)]
+    /// #![feature(maybe_uninit_as_bytes)]
     /// use std::mem::MaybeUninit;
     ///
     /// let uninit = [MaybeUninit::new(0x1234u16), MaybeUninit::new(0x5678u16)];
@@ -1437,7 +1423,7 @@ pub const fn as_bytes(&self) -> &[MaybeUninit<u8>] {
     /// # Examples
     ///
     /// ```
-    /// #![feature(maybe_uninit_as_bytes, maybe_uninit_slice)]
+    /// #![feature(maybe_uninit_as_bytes)]
     /// use std::mem::MaybeUninit;
     ///
     /// let mut uninit = [MaybeUninit::<u16>::uninit(), MaybeUninit::<u16>::uninit()];
@@ -1477,7 +1463,7 @@ pub const fn as_bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
     /// requirement the compiler knows about it is that the data pointer must be
     /// non-null. Dropping such a `Vec<T>` however will cause undefined
     /// behaviour.
-    #[unstable(feature = "maybe_uninit_slice", issue = "63569")]
+    #[stable(feature = "maybe_uninit_slice", since = "CURRENT_RUSTC_VERSION")]
     #[inline(always)]
     #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
     pub const unsafe fn assume_init_drop(&mut self)
@@ -1499,7 +1485,8 @@ pub const fn as_bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
     /// Calling this when the content is not yet fully initialized causes undefined
     /// behavior: it is up to the caller to guarantee that every `MaybeUninit<T>` in
     /// the slice really is in an initialized state.
-    #[unstable(feature = "maybe_uninit_slice", issue = "63569")]
+    #[stable(feature = "maybe_uninit_slice", since = "CURRENT_RUSTC_VERSION")]
+    #[rustc_const_stable(feature = "maybe_uninit_slice", since = "CURRENT_RUSTC_VERSION")]
     #[inline(always)]
     pub const unsafe fn assume_init_ref(&self) -> &[T] {
         // SAFETY: casting `slice` to a `*const [T]` is safe since the caller guarantees that
@@ -1517,7 +1504,8 @@ pub const fn as_bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
     /// behavior: it is up to the caller to guarantee that every `MaybeUninit<T>` in the
     /// slice really is in an initialized state. For instance, `.assume_init_mut()` cannot
     /// be used to initialize a `MaybeUninit` slice.
-    #[unstable(feature = "maybe_uninit_slice", issue = "63569")]
+    #[stable(feature = "maybe_uninit_slice", since = "CURRENT_RUSTC_VERSION")]
+    #[rustc_const_stable(feature = "maybe_uninit_slice", since = "CURRENT_RUSTC_VERSION")]
     #[inline(always)]
     pub const unsafe fn assume_init_mut(&mut self) -> &mut [T] {
         // SAFETY: similar to safety notes for `slice_get_ref`, but we have a
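As a usage sketch for the slice methods touched above (the `maybe_uninit_slice` gate is still needed on toolchains where they are not yet stable), this initializes only a prefix of a buffer and then views and drops exactly that prefix:

```rust
#![feature(maybe_uninit_slice)] // still required where the methods are unstable

use std::mem::MaybeUninit;

fn main() {
    // Start with an uninitialized buffer and initialize only a prefix of it.
    let mut buf = [const { MaybeUninit::<String>::uninit() }; 4];
    let mut initialized = 0;
    for (slot, text) in buf.iter_mut().zip(["a", "b", "c"]) {
        slot.write(text.to_string());
        initialized += 1;
    }

    // SAFETY: exactly `initialized` leading elements have been written.
    let view: &[String] = unsafe { buf[..initialized].assume_init_ref() };
    assert_eq!(view.len(), initialized);
    assert_eq!(view[0], "a");

    // SAFETY: same invariant; drop the initialized prefix exactly once,
    // mirroring the `InitializingSlice` drop impl above.
    unsafe { buf[..initialized].assume_init_drop() };
}
```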
diff --git a/library/core/src/num/f128.rs b/library/core/src/num/f128.rs
index e6e258d..9b9cc80 100644
--- a/library/core/src/num/f128.rs
+++ b/library/core/src/num/f128.rs
@@ -1291,6 +1291,38 @@ pub const fn clamp(mut self, min: f128, max: f128) -> f128 {
         self
     }
 
+    /// Clamps this number to a symmetric range centered around zero.
+    ///
+    /// The method clamps the number's magnitude (absolute value) to be at most `limit`.
+    ///
+    /// This is functionally equivalent to `self.clamp(-limit, limit)`, but is more
+    /// explicit about the intent.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `limit` is negative or NaN, as this indicates a logic error.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(f128)]
+    /// #![feature(clamp_magnitude)]
+    /// # #[cfg(all(target_arch = "x86_64", target_os = "linux"))] {
+    /// assert_eq!(5.0f128.clamp_magnitude(3.0), 3.0);
+    /// assert_eq!((-5.0f128).clamp_magnitude(3.0), -3.0);
+    /// assert_eq!(2.0f128.clamp_magnitude(3.0), 2.0);
+    /// assert_eq!((-2.0f128).clamp_magnitude(3.0), -2.0);
+    /// # }
+    /// ```
+    #[inline]
+    #[unstable(feature = "clamp_magnitude", issue = "148519")]
+    #[must_use = "this returns the clamped value and does not modify the original"]
+    pub fn clamp_magnitude(self, limit: f128) -> f128 {
+        assert!(limit >= 0.0, "limit must be non-negative");
+        let limit = limit.abs(); // Canonicalises -0.0 to 0.0
+        self.clamp(-limit, limit)
+    }
+
     /// Computes the absolute value of `self`.
     ///
     /// This function always returns the precise result.
diff --git a/library/core/src/num/f16.rs b/library/core/src/num/f16.rs
index 4739b79..ab765eb 100644
--- a/library/core/src/num/f16.rs
+++ b/library/core/src/num/f16.rs
@@ -1269,6 +1269,38 @@ pub const fn clamp(mut self, min: f16, max: f16) -> f16 {
         self
     }
 
+    /// Clamps this number to a symmetric range centered around zero.
+    ///
+    /// The method clamps the number's magnitude (absolute value) to be at most `limit`.
+    ///
+    /// This is functionally equivalent to `self.clamp(-limit, limit)`, but is more
+    /// explicit about the intent.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `limit` is negative or NaN, as this indicates a logic error.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(f16)]
+    /// #![feature(clamp_magnitude)]
+    /// # #[cfg(all(target_arch = "x86_64", target_os = "linux"))] {
+    /// assert_eq!(5.0f16.clamp_magnitude(3.0), 3.0);
+    /// assert_eq!((-5.0f16).clamp_magnitude(3.0), -3.0);
+    /// assert_eq!(2.0f16.clamp_magnitude(3.0), 2.0);
+    /// assert_eq!((-2.0f16).clamp_magnitude(3.0), -2.0);
+    /// # }
+    /// ```
+    #[inline]
+    #[unstable(feature = "clamp_magnitude", issue = "148519")]
+    #[must_use = "this returns the clamped value and does not modify the original"]
+    pub fn clamp_magnitude(self, limit: f16) -> f16 {
+        assert!(limit >= 0.0, "limit must be non-negative");
+        let limit = limit.abs(); // Canonicalises -0.0 to 0.0
+        self.clamp(-limit, limit)
+    }
+
     /// Computes the absolute value of `self`.
     ///
     /// This function always returns the precise result.
diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs
index 3cbff38..361f273 100644
--- a/library/core/src/num/f32.rs
+++ b/library/core/src/num/f32.rs
@@ -1446,6 +1446,35 @@ pub const fn clamp(mut self, min: f32, max: f32) -> f32 {
         self
     }
 
+    /// Clamps this number to a symmetric range centered around zero.
+    ///
+    /// The method clamps the number's magnitude (absolute value) to be at most `limit`.
+    ///
+    /// This is functionally equivalent to `self.clamp(-limit, limit)`, but is more
+    /// explicit about the intent.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `limit` is negative or NaN, as this indicates a logic error.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(clamp_magnitude)]
+    /// assert_eq!(5.0f32.clamp_magnitude(3.0), 3.0);
+    /// assert_eq!((-5.0f32).clamp_magnitude(3.0), -3.0);
+    /// assert_eq!(2.0f32.clamp_magnitude(3.0), 2.0);
+    /// assert_eq!((-2.0f32).clamp_magnitude(3.0), -2.0);
+    /// ```
+    #[must_use = "this returns the clamped value and does not modify the original"]
+    #[unstable(feature = "clamp_magnitude", issue = "148519")]
+    #[inline]
+    pub fn clamp_magnitude(self, limit: f32) -> f32 {
+        assert!(limit >= 0.0, "limit must be non-negative");
+        let limit = limit.abs(); // Canonicalises -0.0 to 0.0
+        self.clamp(-limit, limit)
+    }
+
     /// Computes the absolute value of `self`.
     ///
     /// This function always returns the precise result.
diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs
index 60ceff0..17a9086 100644
--- a/library/core/src/num/f64.rs
+++ b/library/core/src/num/f64.rs
@@ -1444,6 +1444,35 @@ pub const fn clamp(mut self, min: f64, max: f64) -> f64 {
         self
     }
 
+    /// Clamps this number to a symmetric range centered around zero.
+    ///
+    /// The method clamps the number's magnitude (absolute value) to be at most `limit`.
+    ///
+    /// This is functionally equivalent to `self.clamp(-limit, limit)`, but is more
+    /// explicit about the intent.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `limit` is negative or NaN, as this indicates a logic error.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(clamp_magnitude)]
+    /// assert_eq!(5.0f64.clamp_magnitude(3.0), 3.0);
+    /// assert_eq!((-5.0f64).clamp_magnitude(3.0), -3.0);
+    /// assert_eq!(2.0f64.clamp_magnitude(3.0), 2.0);
+    /// assert_eq!((-2.0f64).clamp_magnitude(3.0), -2.0);
+    /// ```
+    #[must_use = "this returns the clamped value and does not modify the original"]
+    #[unstable(feature = "clamp_magnitude", issue = "148519")]
+    #[inline]
+    pub fn clamp_magnitude(self, limit: f64) -> f64 {
+        assert!(limit >= 0.0, "limit must be non-negative");
+        let limit = limit.abs(); // Canonicalises -0.0 to 0.0
+        self.clamp(-limit, limit)
+    }
+
     /// Computes the absolute value of `self`.
     ///
     /// This function always returns the precise result.
diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs
index 62f83a3..9966276 100644
--- a/library/core/src/num/int_macros.rs
+++ b/library/core/src/num/int_macros.rs
@@ -3878,5 +3878,32 @@ pub const fn min_value() -> Self {
         pub const fn max_value() -> Self {
             Self::MAX
         }
+
+        /// Clamps this number to a symmetric range centered around zero.
+        ///
+        /// The method clamps the number's magnitude (absolute value) to be at most `limit`.
+        ///
+        /// This is functionally equivalent to `self.clamp(-limit, limit)`, but is more
+        /// explicit about the intent.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// #![feature(clamp_magnitude)]
+        #[doc = concat!("assert_eq!(120", stringify!($SelfT), ".clamp_magnitude(100), 100);")]
+        #[doc = concat!("assert_eq!(-120", stringify!($SelfT), ".clamp_magnitude(100), -100);")]
+        #[doc = concat!("assert_eq!(80", stringify!($SelfT), ".clamp_magnitude(100), 80);")]
+        #[doc = concat!("assert_eq!(-80", stringify!($SelfT), ".clamp_magnitude(100), -80);")]
+        /// ```
+        #[must_use = "this returns the clamped value and does not modify the original"]
+        #[unstable(feature = "clamp_magnitude", issue = "148519")]
+        #[inline]
+        pub fn clamp_magnitude(self, limit: $UnsignedT) -> Self {
+            if let Ok(limit) = core::convert::TryInto::<$SelfT>::try_into(limit) {
+                self.clamp(-limit, limit)
+            } else {
+                self
+            }
+        }
     }
 }
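A quick sketch (nightly-only, gated on the `clamp_magnitude` feature added here) of what the `TryInto` fallback above implies: when the unsigned `limit` does not fit in the signed type, the range `[-limit, limit]` already covers every value of the type, so `self` is returned unchanged.

```rust
#![feature(clamp_magnitude)] // unstable, tracking issue #148519

fn main() {
    // Limit fits in the signed type: ordinary symmetric clamping.
    assert_eq!(120i8.clamp_magnitude(100u8), 100);
    assert_eq!((-120i8).clamp_magnitude(100u8), -100);

    // `i8::MIN` has magnitude 128, which does not fit in `i8`; with limit 127
    // it is pulled up to -127.
    assert_eq!(i8::MIN.clamp_magnitude(127u8), -127);

    // Limit larger than `i8::MAX`: the `TryInto` conversion fails, and since
    // [-limit, limit] already covers every `i8`, `self` is returned as-is.
    assert_eq!((-100i8).clamp_magnitude(200u8), -100);
    assert_eq!(i8::MIN.clamp_magnitude(u8::MAX), i8::MIN);
}
```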
diff --git a/library/core/src/slice/sort/stable/merge.rs b/library/core/src/slice/sort/stable/merge.rs
index bb2747b..26d8480 100644
--- a/library/core/src/slice/sort/stable/merge.rs
+++ b/library/core/src/slice/sort/stable/merge.rs
@@ -35,7 +35,7 @@ pub fn merge<T, F: FnMut(&T, &T) -> bool>(
         //  1. Protects integrity of `v` from panics in `is_less`.
         //  2. Fills the remaining gap in `v` if the longer run gets consumed first.
 
-        let buf = MaybeUninit::slice_as_mut_ptr(scratch);
+        let buf = scratch.as_mut_ptr().cast_init();
 
         let v_base = v.as_mut_ptr();
         let v_mid = v_base.add(mid);
diff --git a/library/core/src/slice/sort/stable/quicksort.rs b/library/core/src/slice/sort/stable/quicksort.rs
index 0439ba8..734a495 100644
--- a/library/core/src/slice/sort/stable/quicksort.rs
+++ b/library/core/src/slice/sort/stable/quicksort.rs
@@ -97,7 +97,7 @@ fn stable_partition<T, F: FnMut(&T, &T) -> bool>(
     }
 
     let v_base = v.as_ptr();
-    let scratch_base = MaybeUninit::slice_as_mut_ptr(scratch);
+    let scratch_base = scratch.as_mut_ptr().cast_init();
 
     // The core idea is to write the values that compare as less-than to the left
     // side of `scratch`, while the values that compared as greater or equal than
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index 0c5552a..0601019 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -130,16 +130,18 @@
 //!
 //! * PowerPC and MIPS platforms with 32-bit pointers do not have `AtomicU64` or
 //!   `AtomicI64` types.
-//! * ARM platforms like `armv5te` that aren't for Linux only provide `load`
-//!   and `store` operations, and do not support Compare and Swap (CAS)
-//!   operations, such as `swap`, `fetch_add`, etc. Additionally on Linux,
-//!   these CAS operations are implemented via [operating system support], which
-//!   may come with a performance penalty.
-//! * ARM targets with `thumbv6m` only provide `load` and `store` operations,
-//!   and do not support Compare and Swap (CAS) operations, such as `swap`,
-//!   `fetch_add`, etc.
+//! * Legacy ARM platforms like ARMv4T and ARMv5TE have very limited hardware
+//!   support for atomics. The bare-metal targets disable this module
+//!   entirely, but the Linux targets [use the kernel] to assist (which comes
+//!   with a performance penalty). Only from ARMv6K onwards do ARM CPUs have
+//!   hardware support for both load/store and Compare and Swap (CAS) atomics.
+//! * ARMv6-M and ARMv8-M baseline targets (`thumbv6m-*` and
+//!   `thumbv8m.base-*`) only provide `load` and `store` operations, and do
+//!   not support Compare and Swap (CAS) operations, such as `swap`,
+//!   `fetch_add`, etc. Full CAS support is available on ARMv7-M and ARMv8-M
+//!   Mainline (`thumbv7m-*`, `thumbv7em*` and `thumbv8m.main-*`).
 //!
-//! [operating system support]: https://www.kernel.org/doc/Documentation/arm/kernel_user_helpers.txt
+//! [use the kernel]: https://www.kernel.org/doc/Documentation/arm/kernel_user_helpers.txt
 //!
 //! Note that future platforms may be added that also do not have support for
 //! some atomic operations. Maximally portable code will want to be careful
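To make that portability note concrete, here is a small sketch using the stable `target_has_atomic` cfg; the fallback function is only a placeholder for what load/store-only targets would have to do differently, not a drop-in replacement.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

static COUNTER: AtomicUsize = AtomicUsize::new(0);

// `target_has_atomic = "ptr"` is set only when pointer-sized CAS/RMW
// operations are available, so `fetch_add` can be used here.
#[cfg(target_has_atomic = "ptr")]
fn bump() -> usize {
    COUNTER.fetch_add(1, Ordering::Relaxed) + 1
}

// Load/store-only targets (e.g. ARMv6-M) still provide `load` and `store`,
// but no atomic increment; this naive read-then-write is not data-race-free,
// and real code would use a critical section or disable interrupts instead.
#[cfg(not(target_has_atomic = "ptr"))]
fn bump() -> usize {
    let next = COUNTER.load(Ordering::Relaxed) + 1;
    COUNTER.store(next, Ordering::Relaxed);
    next
}

fn main() {
    assert_eq!(bump(), 1);
}
```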
diff --git a/library/coretests/tests/lib.rs b/library/coretests/tests/lib.rs
index 4368566..eb2a216 100644
--- a/library/coretests/tests/lib.rs
+++ b/library/coretests/tests/lib.rs
@@ -16,6 +16,7 @@
 #![feature(cfg_target_has_reliable_f16_f128)]
 #![feature(char_internals)]
 #![feature(char_max_len)]
+#![feature(clamp_magnitude)]
 #![feature(clone_to_uninit)]
 #![feature(const_array)]
 #![feature(const_cell_traits)]
diff --git a/library/coretests/tests/num/clamp_magnitude.rs b/library/coretests/tests/num/clamp_magnitude.rs
new file mode 100644
index 0000000..0f96e55
--- /dev/null
+++ b/library/coretests/tests/num/clamp_magnitude.rs
@@ -0,0 +1,139 @@
+macro_rules! check_int_clamp {
+    ($t:ty, $ut:ty) => {
+        let min = <$t>::MIN;
+        let max = <$t>::MAX;
+        let max_u = <$ut>::MAX;
+
+        // Basic clamping
+        assert_eq!((100 as $t).clamp_magnitude(50), 50);
+        assert_eq!((-100 as $t).clamp_magnitude(50), -50);
+        assert_eq!((30 as $t).clamp_magnitude(50), 30);
+        assert_eq!((-30 as $t).clamp_magnitude(50), -30);
+
+        // Exact boundary
+        assert_eq!((50 as $t).clamp_magnitude(50), 50);
+        assert_eq!((-50 as $t).clamp_magnitude(50), -50);
+
+        // Zero cases
+        assert_eq!((0 as $t).clamp_magnitude(100), 0);
+        assert_eq!((0 as $t).clamp_magnitude(0), 0);
+        assert_eq!((100 as $t).clamp_magnitude(0), 0);
+        assert_eq!((-100 as $t).clamp_magnitude(0), 0);
+
+        // MIN/MAX values
+        // Symmetric range [-MAX, MAX]
+        assert_eq!(max.clamp_magnitude(max as $ut), max);
+        assert_eq!(min.clamp_magnitude(max as $ut), -max);
+
+        // Full range (limit covers MIN)
+        let min_abs = min.unsigned_abs();
+        assert_eq!(min.clamp_magnitude(min_abs), min);
+
+        // Limit larger than type max (uN > iN::MAX)
+        assert_eq!(max.clamp_magnitude(max_u), max);
+        assert_eq!(min.clamp_magnitude(max_u), min);
+    };
+}
+
+#[test]
+fn test_clamp_magnitude_i8() {
+    check_int_clamp!(i8, u8);
+}
+
+#[test]
+fn test_clamp_magnitude_i16() {
+    check_int_clamp!(i16, u16);
+}
+
+#[test]
+fn test_clamp_magnitude_i32() {
+    check_int_clamp!(i32, u32);
+}
+
+#[test]
+fn test_clamp_magnitude_i64() {
+    check_int_clamp!(i64, u64);
+}
+
+#[test]
+fn test_clamp_magnitude_i128() {
+    check_int_clamp!(i128, u128);
+}
+
+#[test]
+fn test_clamp_magnitude_isize() {
+    check_int_clamp!(isize, usize);
+}
+
+macro_rules! check_float_clamp {
+    ($t:ty) => {
+        // Basic clamping
+        assert_eq!((5.0 as $t).clamp_magnitude(3.0), 3.0);
+        assert_eq!((-5.0 as $t).clamp_magnitude(3.0), -3.0);
+        assert_eq!((2.0 as $t).clamp_magnitude(3.0), 2.0);
+        assert_eq!((-2.0 as $t).clamp_magnitude(3.0), -2.0);
+
+        // Exact boundary
+        assert_eq!((3.0 as $t).clamp_magnitude(3.0), 3.0);
+        assert_eq!((-3.0 as $t).clamp_magnitude(3.0), -3.0);
+
+        // Zero cases
+        assert_eq!((0.0 as $t).clamp_magnitude(1.0), 0.0);
+        assert_eq!((-0.0 as $t).clamp_magnitude(1.0), 0.0);
+        assert_eq!((5.0 as $t).clamp_magnitude(0.0), 0.0);
+        assert_eq!((-5.0 as $t).clamp_magnitude(0.0), 0.0);
+
+        // Special values - Infinity
+        let inf = <$t>::INFINITY;
+        let neg_inf = <$t>::NEG_INFINITY;
+        assert_eq!(inf.clamp_magnitude(100.0), 100.0);
+        assert_eq!(neg_inf.clamp_magnitude(100.0), -100.0);
+        assert_eq!(inf.clamp_magnitude(inf), inf);
+
+        // Value with infinite limit
+        assert_eq!((1.0 as $t).clamp_magnitude(inf), 1.0);
+        assert_eq!((-1.0 as $t).clamp_magnitude(inf), -1.0);
+
+        // MIN and MAX
+        let max = <$t>::MAX;
+        let min = <$t>::MIN;
+        // Large limit
+        let huge = 1e30;
+        assert_eq!(max.clamp_magnitude(huge), huge);
+        assert_eq!(min.clamp_magnitude(huge), -huge);
+    };
+}
+
+#[test]
+fn test_clamp_magnitude_f32() {
+    check_float_clamp!(f32);
+}
+
+#[test]
+fn test_clamp_magnitude_f64() {
+    check_float_clamp!(f64);
+}
+
+#[test]
+#[should_panic(expected = "limit must be non-negative")]
+fn test_clamp_magnitude_f32_panic_negative_limit() {
+    let _ = 1.0f32.clamp_magnitude(-1.0);
+}
+
+#[test]
+#[should_panic(expected = "limit must be non-negative")]
+fn test_clamp_magnitude_f64_panic_negative_limit() {
+    let _ = 1.0f64.clamp_magnitude(-1.0);
+}
+
+#[test]
+#[should_panic]
+fn test_clamp_magnitude_f32_panic_nan_limit() {
+    let _ = 1.0f32.clamp_magnitude(f32::NAN);
+}
+
+#[test]
+#[should_panic]
+fn test_clamp_magnitude_f64_panic_nan_limit() {
+    let _ = 1.0f64.clamp_magnitude(f64::NAN);
+}
diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs
index e7b9550..0401e9b 100644
--- a/library/std/src/lib.rs
+++ b/library/std/src/lib.rs
@@ -346,7 +346,6 @@
 #![feature(ip)]
 #![feature(lazy_get)]
 #![feature(maybe_uninit_array_assume_init)]
-#![feature(maybe_uninit_slice)]
 #![feature(panic_can_unwind)]
 #![feature(panic_internals)]
 #![feature(pin_coerce_unsized_trait)]
diff --git a/library/std/src/thread/id.rs b/library/std/src/thread/id.rs
index ba70243..3da0825 100644
--- a/library/std/src/thread/id.rs
+++ b/library/std/src/thread/id.rs
@@ -70,7 +70,9 @@ fn exhausted() -> ! {
 
                 // Acquire lock.
                 let mut spin = 0;
-                while COUNTER_LOCKED.compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed).is_err() {
+                // Miri doesn't like it when we yield here, as that interferes with deterministic
+                // thread scheduling, so use `swap` instead of `compare_exchange_weak` to prevent
+                // spurious yields.
+                while COUNTER_LOCKED.swap(true, Ordering::Acquire) {
                     if spin <= 3 {
                         for _ in 0..(1 << spin) {
                             spin_loop();
@@ -80,6 +82,7 @@ fn exhausted() -> ! {
                     }
                     spin += 1;
                 }
+                // This was `false` before the swap, so we got the lock.
 
                 // SAFETY: we have an exclusive lock on the counter.
                 unsafe {
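For reference, a minimal sketch of the test-and-set pattern the change above relies on: `swap(true, Acquire)` returns the previous value, so looping while it returns `true` spins until this thread is the one that flipped the flag and therefore owns the lock. (The real code above additionally ramps up its spinning between retries.)

```rust
use std::hint::spin_loop;
use std::sync::atomic::{AtomicBool, Ordering};

// Illustrative spinlock only; not the actual std implementation.
struct SpinLock {
    locked: AtomicBool,
}

impl SpinLock {
    const fn new() -> Self {
        Self { locked: AtomicBool::new(false) }
    }

    fn lock(&self) {
        // `swap` returns the previous value: as soon as it returns `false`,
        // this thread set the flag and holds the lock.
        while self.locked.swap(true, Ordering::Acquire) {
            spin_loop();
        }
    }

    fn unlock(&self) {
        self.locked.store(false, Ordering::Release);
    }
}

fn main() {
    static LOCK: SpinLock = SpinLock::new();
    LOCK.lock();
    // ... critical section ...
    LOCK.unlock();
}
```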
diff --git a/library/std/tests/path.rs b/library/std/tests/path.rs
index c60edbd..4094b7a 100644
--- a/library/std/tests/path.rs
+++ b/library/std/tests/path.rs
@@ -1,6 +1,5 @@
 // tidy-alphabetical-start
 #![feature(clone_to_uninit)]
-#![feature(maybe_uninit_slice)]
 #![feature(normalize_lexically)]
 #![feature(path_trailing_sep)]
 // tidy-alphabetical-end
diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs
index 0efe0cb..40149ee 100644
--- a/src/bootstrap/src/core/build_steps/dist.rs
+++ b/src/bootstrap/src/core/build_steps/dist.rs
@@ -347,14 +347,14 @@ fn runtime_dll_dist(rust_root: &Path, target: TargetSelection, builder: &Builder
 
     let mut rustc_dlls = vec![];
     // windows-gnu and windows-gnullvm require different runtime libs
-    if target.ends_with("windows-gnu") {
+    if target.is_windows_gnu() {
         rustc_dlls.push("libwinpthread-1.dll");
         if target.starts_with("i686-") {
             rustc_dlls.push("libgcc_s_dw2-1.dll");
         } else {
             rustc_dlls.push("libgcc_s_seh-1.dll");
         }
-    } else if target.ends_with("windows-gnullvm") {
+    } else if target.is_windows_gnullvm() {
         rustc_dlls.push("libunwind.dll");
     } else {
         panic!("Vendoring of runtime DLLs for `{target}` is not supported`");
diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs
index a591be0..db2a76c 100644
--- a/src/bootstrap/src/core/build_steps/llvm.rs
+++ b/src/bootstrap/src/core/build_steps/llvm.rs
@@ -210,7 +210,7 @@ pub(crate) fn is_ci_llvm_available_for_target(
         ("i686-unknown-linux-gnu", false),
         ("x86_64-unknown-linux-gnu", true),
         ("x86_64-apple-darwin", true),
-        ("x86_64-pc-windows-gnu", true),
+        ("x86_64-pc-windows-gnu", false),
         ("x86_64-pc-windows-msvc", true),
         // tier 2 with host tools
         ("aarch64-unknown-linux-musl", false),
@@ -227,7 +227,7 @@ pub(crate) fn is_ci_llvm_available_for_target(
         ("powerpc64le-unknown-linux-musl", false),
         ("riscv64gc-unknown-linux-gnu", false),
         ("s390x-unknown-linux-gnu", false),
-        ("x86_64-pc-windows-gnullvm", true),
+        ("x86_64-pc-windows-gnullvm", false),
         ("x86_64-unknown-freebsd", false),
         ("x86_64-unknown-illumos", false),
         ("x86_64-unknown-linux-musl", false),
@@ -284,8 +284,7 @@ fn run(self, builder: &Builder<'_>) -> LlvmResult {
             LlvmBuildStatus::ShouldBuild(m) => m,
         };
 
-        if builder.llvm_link_shared() && target.is_windows() && !target.ends_with("windows-gnullvm")
-        {
+        if builder.llvm_link_shared() && target.is_windows() && !target.is_windows_gnullvm() {
             panic!("shared linking to LLVM is not currently supported on {}", target.triple);
         }
 
diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml
index ee10e36..e69a658 100644
--- a/src/ci/github-actions/jobs.yml
+++ b/src/ci/github-actions/jobs.yml
@@ -612,16 +612,12 @@
     env:
       SCRIPT: make ci-mingw-x
       RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
-      # There is no dist-x86_64-mingw-alt, so there is no prebuilt LLVM with assertions
-      NO_DOWNLOAD_CI_LLVM: 1
     <<: *job-windows
 
   - name: x86_64-mingw-2
     env:
       SCRIPT: make ci-mingw-bootstrap
       RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
-      # There is no dist-x86_64-mingw-alt, so there is no prebuilt LLVM with assertions
-      NO_DOWNLOAD_CI_LLVM: 1
     <<: *job-windows
 
   - name: dist-x86_64-msvc
diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md
index 4cf95a0..3da3571 100644
--- a/src/doc/rustc/src/SUMMARY.md
+++ b/src/doc/rustc/src/SUMMARY.md
@@ -54,8 +54,8 @@
     - [aarch64_be-unknown-linux-musl](platform-support/aarch64_be-unknown-linux-musl.md)
     - [amdgcn-amd-amdhsa](platform-support/amdgcn-amd-amdhsa.md)
     - [arm-none-eabi](platform-support/arm-none-eabi.md)
-      - [armv4t-none-eabi](platform-support/armv4t-none-eabi.md)
-      - [armv5te-none-eabi](platform-support/armv5te-none-eabi.md)
+      - [{arm,thumb}v4t-none-eabi](platform-support/armv4t-none-eabi.md)
+      - [{arm,thumb}v5te-none-eabi](platform-support/armv5te-none-eabi.md)
       - [armv7a-none-eabi{,hf}](platform-support/armv7a-none-eabi.md)
       - [armv7r-none-eabi{,hf}](platform-support/armv7r-none-eabi.md)
       - [armebv7r-none-eabi{,hf}](platform-support/armebv7r-none-eabi.md)
diff --git a/src/doc/rustc/src/platform-support/armv4t-none-eabi.md b/src/doc/rustc/src/platform-support/armv4t-none-eabi.md
index c6d8876..e5840be 100644
--- a/src/doc/rustc/src/platform-support/armv4t-none-eabi.md
+++ b/src/doc/rustc/src/platform-support/armv4t-none-eabi.md
@@ -8,7 +8,8 @@
 
 Both of these targets can be used on the Game Boy Advance (GBA), among other
 things. On the GBA, one should usually use the `thumb` target to get the best
-overall performance.
+overall performance. Note that this architecture only supports the old
+Thumb-1 instruction set, not the later Thumb-2 instruction set.
 
 ## Target Maintainers
 
diff --git a/src/doc/rustc/src/platform-support/armv5te-none-eabi.md b/src/doc/rustc/src/platform-support/armv5te-none-eabi.md
index e9f34d4..10a6914 100644
--- a/src/doc/rustc/src/platform-support/armv5te-none-eabi.md
+++ b/src/doc/rustc/src/platform-support/armv5te-none-eabi.md
@@ -4,10 +4,12 @@
 * **Library Support:** core and alloc (bare-metal, `#![no_std]`)
 
 Bare-metal target for any cpu in the Armv5TE architecture family, supporting
-ARM/Thumb code interworking (aka `A32`/`T32`), with `A32` code as the default code
-generation.
+ARM/Thumb code interworking (aka `Arm`/`Thumb`), with `Arm` code generated
+by default.
 
-The `thumbv5te-none-eabi` target is the same as this one, but the instruction set defaults to `T32`.
+The `thumbv5te-none-eabi` target is the same as this one, but the instruction
+set defaults to `Thumb`. Note that this architecture only supports the old
+Thumb-1 instruction set, not the later Thumb-2 instruction set.
 
 See [`arm-none-eabi`](arm-none-eabi.md) for information applicable to all
 `arm-none-eabi` targets.
diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml
index 6597c3c..c7c23d3 100644
--- a/src/tools/compiletest/Cargo.toml
+++ b/src/tools/compiletest/Cargo.toml
@@ -9,6 +9,9 @@
 [[bin]]
 name = "compiletest"
 path = "src/bin/main.rs"
+# The compiletest binary crate is a tiny stub that shouldn't contain any unit
+# tests of its own; all of the logic is in the library crate.
+test = false
 
 [dependencies]
 # tidy-alphabetical-start
diff --git a/src/tools/compiletest/src/directives.rs b/src/tools/compiletest/src/directives.rs
index 8e9c28e..fed3041 100644
--- a/src/tools/compiletest/src/directives.rs
+++ b/src/tools/compiletest/src/directives.rs
@@ -34,12 +34,18 @@
 mod tests;
 
 pub struct DirectivesCache {
+    /// "Conditions" used by `ignore-*` and `only-*` directives, prepared in
+    /// advance so that they don't have to be evaluated repeatedly.
+    cfg_conditions: cfg::PreparedConditions,
     needs: CachedNeedsConditions,
 }
 
 impl DirectivesCache {
     pub fn load(config: &Config) -> Self {
-        Self { needs: CachedNeedsConditions::load(config) }
+        Self {
+            cfg_conditions: cfg::prepare_conditions(config),
+            needs: CachedNeedsConditions::load(config),
+        }
     }
 }
 
@@ -1058,8 +1064,8 @@ macro_rules! decision {
                 };
             }
 
-            decision!(cfg::handle_ignore(config, ln));
-            decision!(cfg::handle_only(config, ln));
+            decision!(cfg::handle_ignore(&cache.cfg_conditions, ln));
+            decision!(cfg::handle_only(&cache.cfg_conditions, ln));
             decision!(needs::handle_needs(&cache.needs, config, ln));
             decision!(ignore_llvm(config, ln));
             decision!(ignore_backends(config, ln));
diff --git a/src/tools/compiletest/src/directives/cfg.rs b/src/tools/compiletest/src/directives/cfg.rs
index 531763c..62d10f1 100644
--- a/src/tools/compiletest/src/directives/cfg.rs
+++ b/src/tools/compiletest/src/directives/cfg.rs
@@ -1,12 +1,30 @@
-use std::collections::HashSet;
+use std::collections::{HashMap, HashSet};
+use std::sync::{Arc, LazyLock};
 
 use crate::common::{CompareMode, Config, Debugger};
 use crate::directives::{DirectiveLine, IgnoreDecision};
 
 const EXTRA_ARCHS: &[&str] = &["spirv"];
 
-pub(super) fn handle_ignore(config: &Config, line: &DirectiveLine<'_>) -> IgnoreDecision {
-    let parsed = parse_cfg_name_directive(config, line, "ignore-");
+const EXTERNAL_IGNORES_LIST: &[&str] = &[
+    // tidy-alphabetical-start
+    "ignore-backends",
+    "ignore-gdb-version",
+    "ignore-llvm-version",
+    "ignore-pass",
+    // tidy-alphabetical-end
+];
+
+/// Directive names that begin with `ignore-`, but are disregarded by this
+/// module because they are handled elsewhere.
+pub(crate) static EXTERNAL_IGNORES_SET: LazyLock<HashSet<&str>> =
+    LazyLock::new(|| EXTERNAL_IGNORES_LIST.iter().copied().collect());
+
+pub(super) fn handle_ignore(
+    conditions: &PreparedConditions,
+    line: &DirectiveLine<'_>,
+) -> IgnoreDecision {
+    let parsed = parse_cfg_name_directive(conditions, line, "ignore-");
     let line = line.display();
 
     match parsed.outcome {
@@ -18,13 +36,15 @@ pub(super) fn handle_ignore(config: &Config, line: &DirectiveLine<'_>) -> Ignore
             },
         },
         MatchOutcome::Invalid => IgnoreDecision::Error { message: format!("invalid line: {line}") },
-        MatchOutcome::External => IgnoreDecision::Continue,
-        MatchOutcome::NotADirective => IgnoreDecision::Continue,
+        MatchOutcome::NotHandledHere => IgnoreDecision::Continue,
     }
 }
 
-pub(super) fn handle_only(config: &Config, line: &DirectiveLine<'_>) -> IgnoreDecision {
-    let parsed = parse_cfg_name_directive(config, line, "only-");
+pub(super) fn handle_only(
+    conditions: &PreparedConditions,
+    line: &DirectiveLine<'_>,
+) -> IgnoreDecision {
+    let parsed = parse_cfg_name_directive(conditions, line, "only-");
     let line = line.display();
 
     match parsed.outcome {
@@ -38,278 +58,208 @@ pub(super) fn handle_only(config: &Config, line: &DirectiveLine<'_>) -> IgnoreDe
             },
         },
         MatchOutcome::Invalid => IgnoreDecision::Error { message: format!("invalid line: {line}") },
-        MatchOutcome::External => IgnoreDecision::Continue,
-        MatchOutcome::NotADirective => IgnoreDecision::Continue,
+        MatchOutcome::NotHandledHere => IgnoreDecision::Continue,
     }
 }
 
 /// Parses a name-value directive which contains config-specific information, e.g., `ignore-x86`
 /// or `only-windows`.
 fn parse_cfg_name_directive<'a>(
-    config: &Config,
+    conditions: &PreparedConditions,
     line: &'a DirectiveLine<'a>,
     prefix: &str,
 ) -> ParsedNameDirective<'a> {
     let Some(name) = line.name.strip_prefix(prefix) else {
-        return ParsedNameDirective::not_a_directive();
+        return ParsedNameDirective::not_handled_here();
     };
 
+    if prefix == "ignore-" && EXTERNAL_IGNORES_SET.contains(line.name) {
+        return ParsedNameDirective::not_handled_here();
+    }
+
     // FIXME(Zalathar): This currently allows either a space or a colon, and
     // treats any "value" after a colon as though it were a remark.
     // We should instead forbid the colon syntax for these directives.
-    let comment = line.remark_after_space().or_else(|| line.value_after_colon());
+    let comment = line
+        .remark_after_space()
+        .or_else(|| line.value_after_colon())
+        .map(|c| c.trim().trim_start_matches('-').trim());
 
-    // Some of the matchers might be "" depending on what the target information is. To avoid
-    // problems we outright reject empty directives.
-    if name.is_empty() {
-        return ParsedNameDirective::not_a_directive();
+    if let Some(cond) = conditions.conds.get(name) {
+        ParsedNameDirective {
+            pretty_reason: Some(Arc::clone(&cond.message_when_ignored)),
+            comment,
+            outcome: if cond.value { MatchOutcome::Match } else { MatchOutcome::NoMatch },
+        }
+    } else {
+        ParsedNameDirective { pretty_reason: None, comment, outcome: MatchOutcome::Invalid }
+    }
+}
+
+/// Uses information about the current target (and all targets) to pre-compute
+/// a value (true or false) for a number of "conditions". Those conditions can
+/// then be used by `ignore-*` and `only-*` directives.
+pub(crate) fn prepare_conditions(config: &Config) -> PreparedConditions {
+    let cfgs = config.target_cfgs();
+    let current = &cfgs.current;
+
+    let mut builder = ConditionsBuilder::new();
+
+    // Some condition names overlap (e.g. "macabi" is both an env and an ABI),
+    // so the order in which conditions are added is significant.
+    // Whichever condition registers that name _first_ will take precedence.
+    // (See `ConditionsBuilder::build`.)
+
+    builder.cond("test", true, "always");
+    builder.cond("auxiliary", true, "used by another main test file");
+
+    for target in &cfgs.all_targets {
+        builder.cond(target, *target == config.target, &format!("when the target is {target}"));
+    }
+    for os in &cfgs.all_oses {
+        builder.cond(os, *os == current.os, &format!("when the operating system is {os}"));
+    }
+    for env in &cfgs.all_envs {
+        builder.cond(env, *env == current.env, &format!("when the target environment is {env}"));
+    }
+    for os_and_env in &cfgs.all_oses_and_envs {
+        builder.cond(
+            os_and_env,
+            *os_and_env == current.os_and_env(),
+            &format!("when the operating system and target environment are {os_and_env}"),
+        );
+    }
+    for abi in &cfgs.all_abis {
+        builder.cond(abi, *abi == current.abi, &format!("when the ABI is {abi}"));
+    }
+    for arch in cfgs.all_archs.iter().map(String::as_str).chain(EXTRA_ARCHS.iter().copied()) {
+        builder.cond(arch, *arch == current.arch, &format!("when the architecture is {arch}"));
+    }
+    for n_bit in &cfgs.all_pointer_widths {
+        builder.cond(
+            n_bit,
+            *n_bit == format!("{}bit", current.pointer_width),
+            &format!("when the pointer width is {n_bit}"),
+        );
+    }
+    for family in &cfgs.all_families {
+        builder.cond(
+            family,
+            current.families.contains(family),
+            &format!("when the target family is {family}"),
+        )
     }
 
-    let mut outcome = MatchOutcome::Invalid;
-    let mut message = None;
-
-    macro_rules! condition {
-        (
-            name: $name:expr,
-            $(allowed_names: $allowed_names:expr,)?
-            $(condition: $condition:expr,)?
-            message: $($message:tt)*
-        ) => {{
-            // This is not inlined to avoid problems with macro repetitions.
-            let format_message = || format!($($message)*);
-
-            if outcome != MatchOutcome::Invalid {
-                // Ignore all other matches if we already found one
-            } else if $name.custom_matches(name) {
-                message = Some(format_message());
-                if true $(&& $condition)? {
-                    outcome = MatchOutcome::Match;
-                } else {
-                    outcome = MatchOutcome::NoMatch;
-                }
-            }
-            $(else if $allowed_names.custom_contains(name) {
-                message = Some(format_message());
-                outcome = MatchOutcome::NoMatch;
-            })?
-        }};
-    }
-
-    let target_cfgs = config.target_cfgs();
-    let target_cfg = config.target_cfg();
-
-    condition! {
-        name: "test",
-        message: "always"
-    }
-    condition! {
-        name: "auxiliary",
-        message: "used by another main test file"
-    }
-    condition! {
-        name: &config.target,
-        allowed_names: &target_cfgs.all_targets,
-        message: "when the target is {name}"
-    }
-    condition! {
-        name: &target_cfg.os,
-        allowed_names: &target_cfgs.all_oses,
-        message: "when the operating system is {name}"
-    }
-    condition! {
-        name: &target_cfg.env,
-        allowed_names: &target_cfgs.all_envs,
-        message: "when the target environment is {name}"
-    }
-    condition! {
-        name: &target_cfg.os_and_env(),
-        allowed_names: &target_cfgs.all_oses_and_envs,
-        message: "when the operating system and target environment are {name}"
-    }
-    condition! {
-        name: &target_cfg.abi,
-        allowed_names: &target_cfgs.all_abis,
-        message: "when the ABI is {name}"
-    }
-    condition! {
-        name: &target_cfg.arch,
-        allowed_names: ContainsEither { a: &target_cfgs.all_archs, b: &EXTRA_ARCHS },
-        message: "when the architecture is {name}"
-    }
-    condition! {
-        name: format!("{}bit", target_cfg.pointer_width),
-        allowed_names: &target_cfgs.all_pointer_widths,
-        message: "when the pointer width is {name}"
-    }
-    condition! {
-        name: &*target_cfg.families,
-        allowed_names: &target_cfgs.all_families,
-        message: "when the target family is {name}"
-    }
-
-    condition! {
-        name: "thumb",
-        condition: config.target.starts_with("thumb"),
-        message: "when the architecture is part of the Thumb family"
-    }
+    builder.cond(
+        "thumb",
+        config.target.starts_with("thumb"),
+        "when the architecture is part of the Thumb family",
+    );
 
     // The "arch" of `i586-` targets is "x86", so for more specific matching
     // we have to resort to a string-prefix check.
-    condition! {
-        name: "i586",
-        condition: config.matches_arch("i586"),
-        message: "when the subarchitecture is i586",
-    }
-
-    condition! {
-        name: "apple",
-        condition: config.target.contains("apple"),
-        message: "when the target vendor is Apple"
-    }
-
-    condition! {
-        name: "elf",
-        condition: target_cfg.binary_format == "elf",
-        message: "when the target binary format is ELF"
-    }
-
-    condition! {
-        name: "enzyme",
-        condition: config.has_enzyme,
-        message: "when rustc is built with LLVM Enzyme"
-    }
+    builder.cond("i586", config.matches_arch("i586"), "when the subarchitecture is i586");
+    // FIXME(Zalathar): Use proper target vendor information instead?
+    builder.cond("apple", config.target.contains("apple"), "when the target vendor is Apple");
+    // FIXME(Zalathar): Support all known binary formats, not just ELF?
+    builder.cond("elf", current.binary_format == "elf", "when the target binary format is ELF");
+    builder.cond("enzyme", config.has_enzyme, "when rustc is built with LLVM Enzyme");
 
     // Technically the locally built compiler uses the "dev" channel rather than the "nightly"
     // channel, even though most people don't know or won't care about it. To avoid confusion, we
     // treat the "dev" channel as the "nightly" channel when processing the directive.
-    condition! {
-        name: if config.channel == "dev" { "nightly" } else { &config.channel },
-        allowed_names: &["stable", "beta", "nightly"],
-        message: "when the release channel is {name}",
+    for channel in ["stable", "beta", "nightly"] {
+        let curr_channel = match config.channel.as_str() {
+            "dev" => "nightly",
+            ch => ch,
+        };
+        builder.cond(
+            channel,
+            channel == curr_channel,
+            &format!("when the release channel is {channel}"),
+        );
     }
 
-    condition! {
-        name: "cross-compile",
-        condition: config.target != config.host,
-        message: "when cross-compiling"
+    builder.cond("cross-compile", config.target != config.host, "when cross-compiling");
+    builder.cond("endian-big", config.is_big_endian(), "on big-endian targets");
+
+    for stage in ["stage0", "stage1", "stage2"] {
+        builder.cond(
+            stage,
+            stage == format!("stage{}", config.stage),
+            &format!("when the bootstrapping stage is {stage}"),
+        );
     }
-    condition! {
-        name: "endian-big",
-        condition: config.is_big_endian(),
-        message: "on big-endian targets",
+
+    builder.cond("remote", config.remote_test_client.is_some(), "when running tests remotely");
+    builder.cond(
+        "rustc-debug-assertions",
+        config.with_rustc_debug_assertions,
+        "when rustc is built with debug assertions",
+    );
+    builder.cond(
+        "std-debug-assertions",
+        config.with_std_debug_assertions,
+        "when std is built with debug assertions",
+    );
+
+    for &debugger in Debugger::STR_VARIANTS {
+        builder.cond(
+            debugger,
+            Some(debugger) == config.debugger.as_ref().map(Debugger::to_str),
+            &format!("when the debugger is {debugger}"),
+        );
     }
-    condition! {
-        name: format!("stage{}", config.stage).as_str(),
-        allowed_names: &["stage0", "stage1", "stage2"],
-        message: "when the bootstrapping stage is {name}",
+
+    for &compare_mode in CompareMode::STR_VARIANTS {
+        builder.cond(
+            &format!("compare-mode-{compare_mode}"),
+            Some(compare_mode) == config.compare_mode.as_ref().map(CompareMode::to_str),
+            &format!("when comparing with compare-mode-{compare_mode}"),
+        );
     }
-    condition! {
-        name: "remote",
-        condition: config.remote_test_client.is_some(),
-        message: "when running tests remotely",
-    }
-    condition! {
-        name: "rustc-debug-assertions",
-        condition: config.with_rustc_debug_assertions,
-        message: "when rustc is built with debug assertions",
-    }
-    condition! {
-        name: "std-debug-assertions",
-        condition: config.with_std_debug_assertions,
-        message: "when std is built with debug assertions",
-    }
-    condition! {
-        name: config.debugger.as_ref().map(|d| d.to_str()),
-        allowed_names: &Debugger::STR_VARIANTS,
-        message: "when the debugger is {name}",
-    }
-    condition! {
-        name: config.compare_mode
-            .as_ref()
-            .map(|d| format!("compare-mode-{}", d.to_str())),
-        allowed_names: ContainsPrefixed {
-            prefix: "compare-mode-",
-            inner: CompareMode::STR_VARIANTS,
-        },
-        message: "when comparing with {name}",
-    }
+
     // Coverage tests run the same test file in multiple modes.
     // If a particular test should not be run in one of the modes, ignore it
     // with "ignore-coverage-map" or "ignore-coverage-run".
-    condition! {
-        name: config.mode.to_str(),
-        allowed_names: ["coverage-map", "coverage-run"],
-        message: "when the test mode is {name}",
-    }
-    condition! {
-        name: target_cfg.rustc_abi.as_ref().map(|abi| format!("rustc_abi-{abi}")).unwrap_or_default(),
-        allowed_names: ContainsPrefixed {
-            prefix: "rustc_abi-",
-            inner: target_cfgs.all_rustc_abis.clone(),
-        },
-        message: "when the target `rustc_abi` is {name}",
+    for test_mode in ["coverage-map", "coverage-run"] {
+        builder.cond(
+            test_mode,
+            test_mode == config.mode.to_str(),
+            &format!("when the test mode is {test_mode}"),
+        );
     }
 
-    condition! {
-        name: "dist",
-        condition: std::env::var("COMPILETEST_ENABLE_DIST_TESTS") == Ok("1".to_string()),
-        message: "when performing tests on dist toolchain"
+    for rustc_abi in &cfgs.all_rustc_abis {
+        builder.cond(
+            &format!("rustc_abi-{rustc_abi}"),
+            Some(rustc_abi) == current.rustc_abi.as_ref(),
+            &format!("when the target `rustc_abi` is rustc_abi-{rustc_abi}"),
+        );
     }
 
-    if prefix == "ignore-" && outcome == MatchOutcome::Invalid {
-        // Don't error out for ignore-tidy-* diretives, as those are not handled by compiletest.
-        if name.starts_with("tidy-") {
-            outcome = MatchOutcome::External;
-        }
+    // FIXME(Zalathar): Ideally this should be configured by a command-line
+    // flag, not an environment variable.
+    builder.cond(
+        "dist",
+        std::env::var("COMPILETEST_ENABLE_DIST_TESTS").as_deref() == Ok("1"),
+        "when performing tests on dist toolchain",
+    );
 
-        // Don't error out for ignore-pass, as that is handled elsewhere.
-        if name == "pass" {
-            outcome = MatchOutcome::External;
-        }
-
-        // Don't error out for ignore-llvm-version, that has a custom syntax and is handled
-        // elsewhere.
-        if name == "llvm-version" {
-            outcome = MatchOutcome::External;
-        }
-
-        // Don't error out for ignore-llvm-version, that has a custom syntax and is handled
-        // elsewhere.
-        if name == "gdb-version" {
-            outcome = MatchOutcome::External;
-        }
-
-        // Don't error out for ignore-backends,as it is handled elsewhere.
-        if name == "backends" {
-            outcome = MatchOutcome::External;
-        }
-    }
-
-    ParsedNameDirective {
-        name: Some(name),
-        comment: comment.map(|c| c.trim().trim_start_matches('-').trim()),
-        outcome,
-        pretty_reason: message,
-    }
+    builder.build()
 }
 
 /// The result of parse_cfg_name_directive.
 #[derive(Clone, PartialEq, Debug)]
 pub(super) struct ParsedNameDirective<'a> {
-    pub(super) name: Option<&'a str>,
-    pub(super) pretty_reason: Option<String>,
+    pub(super) pretty_reason: Option<Arc<str>>,
     pub(super) comment: Option<&'a str>,
     pub(super) outcome: MatchOutcome,
 }
 
 impl ParsedNameDirective<'_> {
-    fn not_a_directive() -> Self {
-        Self {
-            name: None,
-            pretty_reason: None,
-            comment: None,
-            outcome: MatchOutcome::NotADirective,
-        }
+    fn not_handled_here() -> Self {
+        Self { pretty_reason: None, comment: None, outcome: MatchOutcome::NotHandledHere }
     }
 }
 
@@ -321,92 +271,59 @@ pub(super) enum MatchOutcome {
     Match,
     /// The directive was invalid.
     Invalid,
-    /// The directive is handled by other parts of our tooling.
-    External,
-    /// The line is not actually a directive.
-    NotADirective,
+    /// The directive should be ignored by this module, because it is handled elsewhere.
+    NotHandledHere,
 }
 
-trait CustomContains {
-    fn custom_contains(&self, item: &str) -> bool;
+#[derive(Debug)]
+pub(crate) struct PreparedConditions {
+    /// Maps the "bare" name of each condition to a structure indicating
+    /// whether the condition is true or false for the target being tested.
+    conds: HashMap<Arc<str>, Cond>,
 }
 
-impl CustomContains for HashSet<String> {
-    fn custom_contains(&self, item: &str) -> bool {
-        self.contains(item)
+#[derive(Debug)]
+struct Cond {
+    /// Bare condition name without an ignore/only prefix, e.g. `aarch64` or `windows`.
+    bare_name: Arc<str>,
+
+    /// Is this condition true or false for the target being tested, based on
+    /// the config that was used to prepare these conditions?
+    ///
+    /// For example, the condition `windows` is true on Windows targets.
+    value: bool,
+
+    /// Message fragment to show when a test is ignored based on this condition
+    /// being true or false, e.g. "when the architecture is aarch64".
+    message_when_ignored: Arc<str>,
+}
+
+struct ConditionsBuilder {
+    conds: Vec<Cond>,
+}
+
+impl ConditionsBuilder {
+    fn new() -> Self {
+        Self { conds: vec![] }
     }
-}
 
-impl CustomContains for &[&str] {
-    fn custom_contains(&self, item: &str) -> bool {
-        self.contains(&item)
+    fn cond(&mut self, bare_name: &str, value: bool, message_when_ignored: &str) {
+        self.conds.push(Cond {
+            bare_name: Arc::<str>::from(bare_name),
+            value,
+            message_when_ignored: Arc::<str>::from(message_when_ignored),
+        });
     }
-}
 
-impl<const N: usize> CustomContains for [&str; N] {
-    fn custom_contains(&self, item: &str) -> bool {
-        self.contains(&item)
-    }
-}
-
-struct ContainsPrefixed<T: CustomContains> {
-    prefix: &'static str,
-    inner: T,
-}
-
-impl<T: CustomContains> CustomContains for ContainsPrefixed<T> {
-    fn custom_contains(&self, item: &str) -> bool {
-        match item.strip_prefix(self.prefix) {
-            Some(stripped) => self.inner.custom_contains(stripped),
-            None => false,
-        }
-    }
-}
-
-struct ContainsEither<'a, A: CustomContains, B: CustomContains> {
-    a: &'a A,
-    b: &'a B,
-}
-
-impl<A: CustomContains, B: CustomContains> CustomContains for ContainsEither<'_, A, B> {
-    fn custom_contains(&self, item: &str) -> bool {
-        self.a.custom_contains(item) || self.b.custom_contains(item)
-    }
-}
-
-trait CustomMatches {
-    fn custom_matches(&self, name: &str) -> bool;
-}
-
-impl CustomMatches for &str {
-    fn custom_matches(&self, name: &str) -> bool {
-        name == *self
-    }
-}
-
-impl CustomMatches for String {
-    fn custom_matches(&self, name: &str) -> bool {
-        name == self
-    }
-}
-
-impl<T: CustomMatches> CustomMatches for &[T] {
-    fn custom_matches(&self, name: &str) -> bool {
-        self.iter().any(|m| m.custom_matches(name))
-    }
-}
-
-impl<const N: usize, T: CustomMatches> CustomMatches for [T; N] {
-    fn custom_matches(&self, name: &str) -> bool {
-        self.iter().any(|m| m.custom_matches(name))
-    }
-}
-
-impl<T: CustomMatches> CustomMatches for Option<T> {
-    fn custom_matches(&self, name: &str) -> bool {
-        match self {
-            Some(inner) => inner.custom_matches(name),
-            None => false,
-        }
+    fn build(self) -> PreparedConditions {
+        let conds = self
+            .conds
+            .into_iter()
+            // Build the map in reverse order, so that conditions declared
+            // earlier have priority over ones declared later.
+            .rev()
+            .map(|cond| (Arc::clone(&cond.bare_name), cond))
+            .collect::<HashMap<_, _>>();
+        PreparedConditions { conds }
     }
 }
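To make the priority rule in `build()` above concrete, here is a minimal standalone sketch (plain `std` types only; the condition name and messages are invented for illustration, not taken from compiletest). Building the map in reverse order means that when two conditions share a bare name, the one declared earlier is the one that ends up in the map:

```rust
use std::collections::HashMap;
use std::sync::Arc;

#[derive(Debug)]
struct Cond {
    bare_name: Arc<str>,
    value: bool,
    message_when_ignored: Arc<str>,
}

fn main() {
    // Two conditions that share a bare name; the earlier one should win.
    let declared = vec![
        Cond {
            bare_name: "windows".into(),
            value: true,
            message_when_ignored: "when the target is Windows".into(),
        },
        Cond {
            bare_name: "windows".into(),
            value: false,
            message_when_ignored: "later duplicate, should be shadowed".into(),
        },
    ];

    // Same trick as `ConditionsBuilder::build`: insert in reverse order,
    // so that earlier declarations overwrite later ones.
    let conds: HashMap<Arc<str>, Cond> = declared
        .into_iter()
        .rev()
        .map(|cond| (Arc::clone(&cond.bare_name), cond))
        .collect();

    assert!(conds["windows"].value);
    println!("{:?}", conds["windows"]);
}
```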
diff --git a/src/tools/compiletest/src/directives/tests.rs b/src/tools/compiletest/src/directives/tests.rs
index bb8002d..90e2cb7 100644
--- a/src/tools/compiletest/src/directives/tests.rs
+++ b/src/tools/compiletest/src/directives/tests.rs
@@ -26,6 +26,19 @@ fn handler_names() {
     );
 }
 
+#[test]
+fn external_ignores() {
+    let unknown_names = directives::cfg::EXTERNAL_IGNORES_SET
+        .difference(&KNOWN_DIRECTIVE_NAMES_SET)
+        .into_iter()
+        .collect::<BTreeSet<_>>();
+
+    assert!(
+        unknown_names.is_empty(),
+        "Directive names not in `directive_names.rs`: {unknown_names:#?}"
+    );
+}
+
 fn make_test_description(
     config: &Config,
     name: String,
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index fe839f2..efe56cb 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -845,6 +845,7 @@
 name = "hir-expand"
 version = "0.0.0"
 dependencies = [
+ "arrayvec",
  "base-db",
  "cfg",
  "cov-mark",
@@ -863,6 +864,7 @@
  "stdx",
  "syntax",
  "syntax-bridge",
+ "thin-vec",
  "tracing",
  "triomphe",
  "tt",
@@ -905,6 +907,7 @@
  "syntax",
  "test-fixture",
  "test-utils",
+ "thin-vec",
  "tracing",
  "tracing-subscriber",
  "tracing-tree",
@@ -1475,6 +1478,7 @@
  "parser",
  "ra-ap-rustc_lexer",
  "rustc-hash 2.1.1",
+ "salsa",
  "smallvec",
  "span",
  "stdx",
@@ -2270,9 +2274,9 @@
 
 [[package]]
 name = "rowan"
-version = "0.15.15"
+version = "0.15.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
+checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b"
 dependencies = [
  "countme",
  "hashbrown 0.14.5",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 946e54b..6991eee 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -4,7 +4,7 @@
 resolver = "2"
 
 [workspace.package]
-rust-version = "1.88"
+rust-version = "1.91"
 edition = "2024"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
@@ -52,7 +52,7 @@
 # local crates
 macros = { path = "./crates/macros", version = "0.0.0" }
 base-db = { path = "./crates/base-db", version = "0.0.0" }
-cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
+cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] }
 hir = { path = "./crates/hir", version = "0.0.0" }
 hir-def = { path = "./crates/hir-def", version = "0.0.0" }
 hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
@@ -132,7 +132,7 @@
 pulldown-cmark-to-cmark = "10.0.4"
 pulldown-cmark = { version = "0.9.6", default-features = false }
 rayon = "1.10.0"
-rowan = "=0.15.15"
+rowan = "=0.15.17"
 # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
 # on impls without it
 salsa = { version = "0.24.0", default-features = false, features = [
@@ -170,6 +170,7 @@
 triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
 url = "2.5.4"
 xshell = "0.2.7"
+thin-vec = "0.2.14"
 petgraph = { version = "0.8.2", default-features = false }
 
 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
diff --git a/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs
new file mode 100644
index 0000000..e2791ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs
@@ -0,0 +1,302 @@
+//! Defines [`EditionedFileId`], a wrapper around [`span::EditionedFileId`] that is
+//! interned (so queries can take it) and remembers its crate.
+
+use core::fmt;
+use std::hash::{Hash, Hasher};
+
+use span::Edition;
+use vfs::FileId;
+
+use crate::{Crate, RootQueryDb};
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EditionedFileId(
+    salsa::Id,
+    std::marker::PhantomData<&'static salsa::plumbing::interned::Value<EditionedFileId>>,
+);
+
+const _: () = {
+    use salsa::plumbing as zalsa_;
+    use zalsa_::interned as zalsa_struct_;
+    type Configuration_ = EditionedFileId;
+
+    #[derive(Debug, Clone, PartialEq, Eq)]
+    pub struct EditionedFileIdData {
+        editioned_file_id: span::EditionedFileId,
+        krate: Crate,
+    }
+
+    /// We would like to include the origin crate in an `EditionedFileId` (for use in the item
+    /// tree), but this poses a problem.
+    ///
+    /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too,
+    /// because that would increase their size and therefore memory usage significantly.
+    /// Furthermore, code that uses spans does not generally need the crate: it uses the
+    /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate.
+    ///
+    /// To solve this, we hash **only the `span::EditionedFileId`**, but still compare
+    /// the crate in the equality check. This preserves the `Hash`/`Eq` invariant, because
+    /// equal values still hash equally; values that differ only in their crate merely share
+    /// a hash, and the same file id being used by multiple crates is rare anyway. Then,
+    /// when we only have a `span::EditionedFileId`, we use the `intern()` method to
+    /// reuse existing file ids, creating a new one only if needed. See [`from_span_guess_origin`].
+    ///
+    /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401
+    ///
+    /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin
+    #[derive(Hash, PartialEq, Eq)]
+    struct WithoutCrate {
+        editioned_file_id: span::EditionedFileId,
+    }
+
+    impl Hash for EditionedFileIdData {
+        #[inline]
+        fn hash<H: Hasher>(&self, state: &mut H) {
+            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
+            editioned_file_id.hash(state);
+        }
+    }
+
+    impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
+        #[inline]
+        fn hash<H: Hasher>(&self, state: &mut H) {
+            Hash::hash(self, state);
+        }
+
+        #[inline]
+        fn eq(&self, data: &WithoutCrate) -> bool {
+            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
+            editioned_file_id == data.editioned_file_id
+        }
+    }
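To make the `Hash`/`Eq` reasoning above concrete, here is a minimal standalone sketch (toy `FileId`/`Crate`/`Key` types, not rust-analyzer's): hashing only the file id while comparing both fields keeps the contract intact, since equal keys still hash equally, and two keys that differ only in their crate merely collide, which hash-based containers tolerate.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy stand-ins for `span::EditionedFileId` and `Crate`; purely illustrative.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct FileId(u32);
#[derive(Clone, Copy, PartialEq, Eq)]
struct Crate(u32);

// Mirrors the scheme above: equality looks at both fields, the hash only at
// the file id.
#[derive(PartialEq, Eq)]
struct Key {
    file_id: FileId,
    krate: Crate,
}

impl Hash for Key {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // `krate` is intentionally left out of the hash.
        self.file_id.hash(state);
    }
}

fn hash_of(key: &Key) -> u64 {
    let mut hasher = DefaultHasher::new();
    key.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let a = Key { file_id: FileId(1), krate: Crate(1) };
    let b = Key { file_id: FileId(1), krate: Crate(2) };

    // The `Hash`/`Eq` contract only requires equal values to hash equally,
    // which still holds; `a` and `b` are unequal and merely collide.
    assert!(a != b);
    assert_eq!(hash_of(&a), hash_of(&b));
}
```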
+
+    impl zalsa_::HasJar for EditionedFileId {
+        type Jar = zalsa_struct_::JarImpl<EditionedFileId>;
+        const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
+    }
+
+    zalsa_::register_jar! {
+        zalsa_::ErasedJar::erase::<EditionedFileId>()
+    }
+
+    impl zalsa_struct_::Configuration for EditionedFileId {
+        const LOCATION: salsa::plumbing::Location =
+            salsa::plumbing::Location { file: file!(), line: line!() };
+        const DEBUG_NAME: &'static str = "EditionedFileId";
+        const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
+        const PERSIST: bool = false;
+
+        type Fields<'a> = EditionedFileIdData;
+        type Struct<'db> = EditionedFileId;
+
+        fn serialize<S>(_: &Self::Fields<'_>, _: S) -> Result<S::Ok, S::Error>
+        where
+            S: zalsa_::serde::Serializer,
+        {
+            unimplemented!("attempted to serialize value that set `PERSIST` to false")
+        }
+
+        fn deserialize<'de, D>(_: D) -> Result<Self::Fields<'static>, D::Error>
+        where
+            D: zalsa_::serde::Deserializer<'de>,
+        {
+            unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false");
+        }
+    }
+
+    impl Configuration_ {
+        pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl<Self> {
+            static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<EditionedFileId>> =
+                zalsa_::IngredientCache::new();
+
+            // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only
+            // ingredient created by our jar is the struct ingredient.
+            unsafe {
+                CACHE.get_or_create(zalsa, || {
+                    zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>()
+                })
+            }
+        }
+    }
+
+    impl zalsa_::AsId for EditionedFileId {
+        fn as_id(&self) -> salsa::Id {
+            self.0.as_id()
+        }
+    }
+    impl zalsa_::FromId for EditionedFileId {
+        fn from_id(id: salsa::Id) -> Self {
+            Self(<salsa::Id>::from_id(id), std::marker::PhantomData)
+        }
+    }
+
+    unsafe impl Send for EditionedFileId {}
+    unsafe impl Sync for EditionedFileId {}
+
+    impl std::fmt::Debug for EditionedFileId {
+        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+            Self::default_debug_fmt(*self, f)
+        }
+    }
+
+    impl zalsa_::SalsaStructInDb for EditionedFileId {
+        type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
+
+        fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices {
+            aux.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>().into()
+        }
+
+        fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator<Item = zalsa_::DatabaseKeyIndex> + '_ {
+            let _ingredient_index =
+                zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>();
+            <EditionedFileId>::ingredient(zalsa).entries(zalsa).map(|entry| entry.key())
+        }
+
+        #[inline]
+        fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
+            if type_id == std::any::TypeId::of::<EditionedFileId>() {
+                Some(<Self as salsa::plumbing::FromId>::from_id(id))
+            } else {
+                None
+            }
+        }
+
+        #[inline]
+        unsafe fn memo_table(
+            zalsa: &zalsa_::Zalsa,
+            id: zalsa_::Id,
+            current_revision: zalsa_::Revision,
+        ) -> zalsa_::MemoTableWithTypes<'_> {
+            // SAFETY: Guaranteed by caller.
+            unsafe {
+                zalsa.table().memos::<zalsa_struct_::Value<EditionedFileId>>(id, current_revision)
+            }
+        }
+    }
+
+    unsafe impl zalsa_::Update for EditionedFileId {
+        unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
+            if unsafe { *old_pointer } != new_value {
+                unsafe { *old_pointer = new_value };
+                true
+            } else {
+                false
+            }
+        }
+    }
+
+    impl EditionedFileId {
+        pub fn from_span(
+            db: &(impl salsa::Database + ?Sized),
+            editioned_file_id: span::EditionedFileId,
+            krate: Crate,
+        ) -> Self {
+            let (zalsa, zalsa_local) = db.zalsas();
+            Configuration_::ingredient(zalsa).intern(
+                zalsa,
+                zalsa_local,
+                EditionedFileIdData { editioned_file_id, krate },
+                |_, data| data,
+            )
+        }
+
+        /// Guesses the crate for the file.
+        ///
+        /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
+        ///
+        ///  1. The file is not in the module tree.
+        ///  2. You are latency-sensitive and cannot afford to call the def map to precisely compute the origin
+        ///     (e.g. the on-enter feature, folding, etc.).
+        pub fn from_span_guess_origin(
+            db: &dyn RootQueryDb,
+            editioned_file_id: span::EditionedFileId,
+        ) -> Self {
+            let (zalsa, zalsa_local) = db.zalsas();
+            Configuration_::ingredient(zalsa).intern(
+                zalsa,
+                zalsa_local,
+                WithoutCrate { editioned_file_id },
+                |_, _| {
+                    // FileId not in the database.
+                    let krate = db
+                        .relevant_crates(editioned_file_id.file_id())
+                        .first()
+                        .copied()
+                        .or_else(|| db.all_crates().first().copied())
+                        .unwrap_or_else(|| {
+                            // What we're doing here is a bit fishy. We rely on the fact that we only need
+                            // the crate in the item tree, and we should not create an `EditionedFileId`
+                            // without a crate except in cases where it does not matter. The chances that
+                            // `all_crates()` will be empty are also very slim, but it can occur during startup.
+                            // In the very unlikely case that there is a bug and we'll use this crate, Salsa
+                            // will panic.
+
+                            // SAFETY: 0 is less than `Id::MAX_U32`.
+                            salsa::plumbing::FromId::from_id(unsafe { salsa::Id::from_index(0) })
+                        });
+                    EditionedFileIdData { editioned_file_id, krate }
+                },
+            )
+        }
+
+        pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId {
+            let zalsa = db.zalsa();
+            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
+            fields.editioned_file_id
+        }
+
+        pub fn krate(self, db: &dyn salsa::Database) -> Crate {
+            let zalsa = db.zalsa();
+            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
+            fields.krate
+        }
+
+        /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl)
+        pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+            zalsa_::with_attached_database(|db| {
+                let zalsa = db.zalsa();
+                let fields = Configuration_::ingredient(zalsa).fields(zalsa, this);
+                fmt::Debug::fmt(fields, f)
+            })
+            .unwrap_or_else(|| {
+                f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish()
+            })
+        }
+    }
+};
+
+impl EditionedFileId {
+    #[inline]
+    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self {
+        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition), krate)
+    }
+
+    /// Attaches the current edition and guesses the crate for the file.
+    ///
+    /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
+    ///
+    ///  1. The file is not in the module tree.
+    ///  2. You are latency-sensitive and cannot afford to call the def map to precisely compute the origin
+    ///     (e.g. the on-enter feature, folding, etc.).
+    #[inline]
+    pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self {
+        Self::from_span_guess_origin(db, span::EditionedFileId::current_edition(file_id))
+    }
+
+    #[inline]
+    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
+        let id = self.editioned_file_id(db);
+        id.file_id()
+    }
+
+    #[inline]
+    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+        let id = self.editioned_file_id(db);
+        (id.file_id(), id.edition())
+    }
+
+    #[inline]
+    pub fn edition(self, db: &dyn salsa::Database) -> Edition {
+        self.editioned_file_id(db).edition()
+    }
+}
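The fallback chain used by `from_span_guess_origin` when the file's crate is not known precisely (first a crate that owns the file, then any crate at all, and only then a dummy id that must never actually be read) can be summarized with a tiny standalone sketch; `guess_crate` and the integer ids below are purely illustrative and not part of this patch:

```rust
// Illustrative stand-in for the crate-guessing fallback chain; plain integers
// play the role of crate ids here.
fn guess_crate(relevant_crates: &[u32], all_crates: &[u32]) -> u32 {
    relevant_crates
        .first()
        .copied()
        .or_else(|| all_crates.first().copied())
        // Stand-in for the "index 0" dummy id that must never be used.
        .unwrap_or(u32::MAX)
}

fn main() {
    assert_eq!(guess_crate(&[7, 9], &[1, 2]), 7); // a crate owns the file
    assert_eq!(guess_crate(&[], &[1, 2]), 1); // fall back to any known crate
    assert_eq!(guess_crate(&[], &[]), u32::MAX); // startup: no crates known yet
}
```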
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index 5149d2d..1b41386 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -857,9 +857,10 @@ pub fn shrink_to_fit(&mut self) {
     }
 }
 
-impl BuiltCrateData {
-    pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
-        EditionedFileId::new(db, self.root_file_id, self.edition)
+impl Crate {
+    pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId {
+        let data = self.data(db);
+        EditionedFileId::new(db, data.root_file_id, data.edition, self)
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index 3629a00..9793892 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -5,6 +5,7 @@
 
 // FIXME: Rename this crate, base db is non descriptive
 mod change;
+mod editioned_file_id;
 mod input;
 pub mod target;
 
@@ -17,6 +18,7 @@
 
 pub use crate::{
     change::FileChange,
+    editioned_file_id::EditionedFileId,
     input::{
         BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
         CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
@@ -29,7 +31,6 @@
 use rustc_hash::FxHasher;
 use salsa::{Durability, Setter};
 pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
-use span::Edition;
 use syntax::{Parse, SyntaxError, ast};
 use triomphe::Arc;
 pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@@ -175,42 +176,6 @@ pub fn set_file_source_root_with_durability(
     }
 }
 
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
-#[derive(PartialOrd, Ord)]
-pub struct EditionedFileId {
-    pub editioned_file_id: span::EditionedFileId,
-}
-
-impl EditionedFileId {
-    // Salsa already uses the name `new`...
-    #[inline]
-    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
-        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
-    }
-
-    #[inline]
-    pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
-        EditionedFileId::new(db, file_id, Edition::CURRENT)
-    }
-
-    #[inline]
-    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
-        let id = self.editioned_file_id(db);
-        id.file_id()
-    }
-
-    #[inline]
-    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
-        let id = self.editioned_file_id(db);
-        (id.file_id(), id.edition())
-    }
-
-    #[inline]
-    pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
-        self.editioned_file_id(db).edition()
-    }
-}
-
 #[salsa_macros::input(debug)]
 pub struct FileText {
     #[returns(ref)]
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index e17969b..9e2a95d 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -18,6 +18,7 @@
 
 # locals deps
 tt = { workspace = true, optional = true }
+syntax = { workspace = true, optional = true }
 intern.workspace = true
 
 [dev-dependencies]
diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
index 7a21015..76e0aba 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
@@ -63,6 +63,8 @@ fn from(atom: CfgAtom) -> Self {
 }
 
 impl CfgExpr {
+    // FIXME: Parsing from `tt` is only used in a handful of places, reconsider
+    // if we should switch them to AST.
     #[cfg(feature = "tt")]
     pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
         next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
@@ -73,6 +75,13 @@ pub fn parse_from_iter<S: Copy>(tt: &mut tt::iter::TtIter<'_, S>) -> CfgExpr {
         next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
     }
 
+    #[cfg(feature = "syntax")]
+    pub fn parse_from_ast(
+        ast: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
+    ) -> CfgExpr {
+        next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid)
+    }
+
     /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
     pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
         match self {
@@ -89,6 +98,56 @@ pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
     }
 }
 
+#[cfg(feature = "syntax")]
+fn next_cfg_expr_from_ast(
+    it: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
+) -> Option<CfgExpr> {
+    use intern::sym;
+    use syntax::{NodeOrToken, SyntaxKind, T, ast};
+
+    let name = match it.next() {
+        None => return None,
+        Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => {
+            Symbol::intern(ident.text())
+        }
+        Some(_) => return Some(CfgExpr::Invalid),
+    };
+
+    let ret = match it.peek() {
+        Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
+            it.next();
+            if let Some(NodeOrToken::Token(literal)) = it.peek()
+                && matches!(literal.kind(), SyntaxKind::STRING)
+            {
+                let literal = tt::token_to_literal(literal.text(), ()).symbol;
+                it.next();
+                CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
+            } else {
+                return Some(CfgExpr::Invalid);
+            }
+        }
+        Some(NodeOrToken::Node(subtree)) => {
+            let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable();
+            it.next();
+            let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter));
+            match name {
+                s if s == sym::all => CfgExpr::All(subs.collect()),
+                s if s == sym::any => CfgExpr::Any(subs.collect()),
+                s if s == sym::not => {
+                    CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid)))
+                }
+                _ => CfgExpr::Invalid,
+            }
+        }
+        _ => CfgAtom::Flag(name).into(),
+    };
+
+    // Eat comma separator
+    while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
+
+    Some(ret)
+}
+
 #[cfg(feature = "tt")]
 fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
     use intern::sym;
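The recursive-descent structure of `next_cfg_expr_from_ast` is easier to see on a toy token stream. The sketch below uses invented `Tok` and `Expr` types rather than the real `cfg`/`syntax` crate APIs, but follows the same steps: read a name, then branch on `= "literal"`, a parenthesized group (`all`/`any`/`not`), or a bare flag, and finally skip past the comma separator:

```rust
// Toy tokens standing in for rust-analyzer's syntax tree nodes; illustrative only.
#[derive(Debug)]
enum Tok {
    Ident(String),
    Eq,
    Str(String),
    Group(Vec<Tok>),
    Comma,
}

#[derive(Debug, PartialEq)]
enum Expr {
    Invalid,
    Flag(String),
    KeyValue { key: String, value: String },
    All(Vec<Expr>),
    Any(Vec<Expr>),
    Not(Box<Expr>),
}

fn next_expr(it: &mut std::iter::Peekable<std::vec::IntoIter<Tok>>) -> Option<Expr> {
    // Read the leading name, as the real parser does.
    let name = match it.next() {
        None => return None,
        Some(Tok::Ident(name)) => name,
        Some(_) => return Some(Expr::Invalid),
    };
    let ret = match it.peek() {
        // `name = "literal"`
        Some(Tok::Eq) => {
            it.next();
            match it.next() {
                Some(Tok::Str(value)) => Expr::KeyValue { key: name, value },
                _ => return Some(Expr::Invalid),
            }
        }
        // `name(...)` with `all` / `any` / `not`
        Some(Tok::Group(_)) => {
            let Some(Tok::Group(inner)) = it.next() else { unreachable!() };
            let mut inner = inner.into_iter().peekable();
            let mut subs = std::iter::from_fn(|| next_expr(&mut inner));
            match name.as_str() {
                "all" => Expr::All(subs.collect()),
                "any" => Expr::Any(subs.collect()),
                "not" => Expr::Not(Box::new(subs.next().unwrap_or(Expr::Invalid))),
                _ => Expr::Invalid,
            }
        }
        // Bare flag such as `unix`.
        _ => Expr::Flag(name),
    };
    // Eat everything up to and including the comma separator.
    while it.next().is_some_and(|t| !matches!(t, Tok::Comma)) {}
    Some(ret)
}

fn main() {
    // Tokens for `all(unix, feature = "foo")`.
    let toks = vec![
        Tok::Ident("all".into()),
        Tok::Group(vec![
            Tok::Ident("unix".into()),
            Tok::Comma,
            Tok::Ident("feature".into()),
            Tok::Eq,
            Tok::Str("foo".into()),
        ]),
    ];
    let expr = next_expr(&mut toks.into_iter().peekable()).unwrap();
    assert_eq!(
        expr,
        Expr::All(vec![
            Expr::Flag("unix".into()),
            Expr::KeyValue { key: "feature".into(), value: "foo".into() },
        ])
    );
    println!("{expr:?}");
}
```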
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
index 6766748..52c581d 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -1,7 +1,10 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{Expect, expect};
 use intern::Symbol;
-use syntax::{AstNode, Edition, ast};
+use syntax::{
+    AstNode, Edition,
+    ast::{self, TokenTreeChildren},
+};
 use syntax_bridge::{
     DocCommentDesugarMode,
     dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
@@ -10,24 +13,33 @@
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
+#[track_caller]
+fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr {
+    CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable())
+}
+
+#[track_caller]
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt.syntax(),
+        tt_ast.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
     );
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
+    let cfg = parse_ast_cfg(&tt_ast);
+    assert_eq!(cfg, expected);
 }
 
+#[track_caller]
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt.syntax(),
+        tt_ast.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
@@ -35,13 +47,17 @@ fn check_dnf(input: &str, expect: Expect) {
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
     expect.assert_eq(&actual);
+    let cfg = parse_ast_cfg(&tt_ast);
+    let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
+    expect.assert_eq(&actual);
 }
 
+#[track_caller]
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt.syntax(),
+        tt_ast.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
@@ -50,14 +66,18 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let dnf = DnfExpr::new(&cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
     expect.assert_eq(&why_inactive);
+    let cfg = parse_ast_cfg(&tt_ast);
+    let dnf = DnfExpr::new(&cfg);
+    let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
+    expect.assert_eq(&why_inactive);
 }
 
 #[track_caller]
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt.syntax(),
+        tt_ast.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
@@ -66,6 +86,10 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let dnf = DnfExpr::new(&cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
     assert_eq!(hints, expected_hints);
+    let cfg = parse_ast_cfg(&tt_ast);
+    let dnf = DnfExpr::new(&cfg);
+    let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
+    assert_eq!(hints, expected_hints);
 }
 
 #[test]
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index e174ca5..a9b51e3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -44,7 +44,8 @@
 cfg.workspace = true
 tt.workspace = true
 span.workspace = true
-thin-vec = "0.2.14"
+thin-vec.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 expect-test.workspace = true
@@ -52,7 +53,6 @@
 # local deps
 test-utils.workspace = true
 test-fixture.workspace = true
-syntax-bridge.workspace = true
 
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
deleted file mode 100644
index b4fcfa1..0000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ /dev/null
@@ -1,901 +0,0 @@
-//! A higher level attributes based on TokenTree, with also some shortcuts.
-
-use std::{borrow::Cow, convert::identity, hash::Hash, ops};
-
-use base_db::Crate;
-use cfg::{CfgExpr, CfgOptions};
-use either::Either;
-use hir_expand::{
-    HirFileId, InFile,
-    attrs::{Attr, AttrId, RawAttrs, collect_attrs},
-    span_map::SpanMapRef,
-};
-use intern::{Symbol, sym};
-use la_arena::{ArenaMap, Idx, RawIdx};
-use mbe::DelimiterKind;
-use rustc_abi::ReprOptions;
-use span::AstIdNode;
-use syntax::{
-    AstPtr,
-    ast::{self, HasAttrs},
-};
-use triomphe::Arc;
-use tt::iter::{TtElement, TtIter};
-
-use crate::{
-    AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId,
-    VariantId,
-    db::DefDatabase,
-    item_tree::block_item_tree_query,
-    lang_item::LangItem,
-    nameres::{ModuleOrigin, ModuleSource},
-    src::{HasChildSource, HasSource},
-};
-
-/// Desugared attributes of an item post `cfg_attr` expansion.
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
-pub struct Attrs(RawAttrs);
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct AttrsWithOwner {
-    attrs: Attrs,
-    owner: AttrDefId,
-}
-
-impl Attrs {
-    pub fn new(
-        db: &dyn DefDatabase,
-        owner: &dyn ast::HasAttrs,
-        span_map: SpanMapRef<'_>,
-        cfg_options: &CfgOptions,
-    ) -> Self {
-        Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options))
-    }
-
-    pub fn get(&self, id: AttrId) -> Option<&Attr> {
-        (**self).iter().find(|attr| attr.id == id)
-    }
-
-    pub(crate) fn expand_cfg_attr(
-        db: &dyn DefDatabase,
-        krate: Crate,
-        raw_attrs: RawAttrs,
-    ) -> Attrs {
-        Attrs(raw_attrs.expand_cfg_attr(db, krate))
-    }
-
-    pub(crate) fn is_cfg_enabled_for(
-        db: &dyn DefDatabase,
-        owner: &dyn ast::HasAttrs,
-        span_map: SpanMapRef<'_>,
-        cfg_options: &CfgOptions,
-    ) -> Result<(), CfgExpr> {
-        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
-            .filter_map(|attr| attr.cfg())
-            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
-                true => None,
-                false => Some(cfg),
-            })
-            .map_or(Ok(()), Err)
-    }
-}
-
-impl ops::Deref for Attrs {
-    type Target = [Attr];
-
-    fn deref(&self) -> &[Attr] {
-        &self.0
-    }
-}
-
-impl ops::Deref for AttrsWithOwner {
-    type Target = Attrs;
-
-    fn deref(&self) -> &Attrs {
-        &self.attrs
-    }
-}
-
-impl Attrs {
-    pub const EMPTY: Self = Self(RawAttrs::EMPTY);
-
-    pub(crate) fn fields_attrs_query(
-        db: &dyn DefDatabase,
-        v: VariantId,
-    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
-        let _p = tracing::info_span!("fields_attrs_query").entered();
-        let mut res = ArenaMap::default();
-        let (fields, file_id, krate) = match v {
-            VariantId::EnumVariantId(it) => {
-                let loc = it.lookup(db);
-                let krate = loc.parent.lookup(db).container.krate;
-                let source = loc.source(db);
-                (source.value.field_list(), source.file_id, krate)
-            }
-            VariantId::StructId(it) => {
-                let loc = it.lookup(db);
-                let krate = loc.container.krate;
-                let source = loc.source(db);
-                (source.value.field_list(), source.file_id, krate)
-            }
-            VariantId::UnionId(it) => {
-                let loc = it.lookup(db);
-                let krate = loc.container.krate;
-                let source = loc.source(db);
-                (
-                    source.value.record_field_list().map(ast::FieldList::RecordFieldList),
-                    source.file_id,
-                    krate,
-                )
-            }
-        };
-        let Some(fields) = fields else {
-            return Arc::new(res);
-        };
-
-        let cfg_options = krate.cfg_options(db);
-        let span_map = db.span_map(file_id);
-
-        match fields {
-            ast::FieldList::RecordFieldList(fields) => {
-                let mut idx = 0;
-                for field in fields.fields() {
-                    let attrs =
-                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
-                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
-                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
-                        idx += 1;
-                    }
-                }
-            }
-            ast::FieldList::TupleFieldList(fields) => {
-                let mut idx = 0;
-                for field in fields.fields() {
-                    let attrs =
-                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
-                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
-                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
-                        idx += 1;
-                    }
-                }
-            }
-        }
-
-        res.shrink_to_fit();
-        Arc::new(res)
-    }
-}
-
-impl Attrs {
-    #[inline]
-    pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> {
-        AttrQuery { attrs: self, key }
-    }
-
-    #[inline]
-    pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
-        self.iter()
-            .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
-    }
-
-    #[inline]
-    pub fn cfg(&self) -> Option<CfgExpr> {
-        let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse);
-        let first = cfgs.next()?;
-        match cfgs.next() {
-            Some(second) => {
-                let cfgs = [first, second].into_iter().chain(cfgs);
-                Some(CfgExpr::All(cfgs.collect()))
-            }
-            None => Some(first),
-        }
-    }
-
-    #[inline]
-    pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
-        self.by_key(sym::cfg).tt_values().map(CfgExpr::parse)
-    }
-
-    #[inline]
-    pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> {
-        self.cfgs().try_for_each(|cfg| {
-            if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) }
-        })
-    }
-
-    #[inline]
-    pub fn lang(&self) -> Option<&Symbol> {
-        self.by_key(sym::lang).string_value()
-    }
-
-    #[inline]
-    pub fn lang_item(&self) -> Option<LangItem> {
-        self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol)
-    }
-
-    #[inline]
-    pub fn has_doc_hidden(&self) -> bool {
-        self.by_key(sym::doc).tt_values().any(|tt| {
-            tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
-                matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
-        })
-    }
-
-    #[inline]
-    pub fn has_doc_notable_trait(&self) -> bool {
-        self.by_key(sym::doc).tt_values().any(|tt| {
-            tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
-                matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
-        })
-    }
-
-    #[inline]
-    pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ {
-        self.by_key(sym::doc).tt_values().map(DocExpr::parse)
-    }
-
-    #[inline]
-    pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ {
-        self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
-    }
-
-    #[inline]
-    pub fn export_name(&self) -> Option<&Symbol> {
-        self.by_key(sym::export_name).string_value()
-    }
-
-    #[inline]
-    pub fn is_proc_macro(&self) -> bool {
-        self.by_key(sym::proc_macro).exists()
-    }
-
-    #[inline]
-    pub fn is_proc_macro_attribute(&self) -> bool {
-        self.by_key(sym::proc_macro_attribute).exists()
-    }
-
-    #[inline]
-    pub fn is_proc_macro_derive(&self) -> bool {
-        self.by_key(sym::proc_macro_derive).exists()
-    }
-
-    #[inline]
-    pub fn is_test(&self) -> bool {
-        self.iter().any(|it| {
-            it.path()
-                .segments()
-                .iter()
-                .rev()
-                .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev())
-                .all(|it| it.0 == it.1)
-        })
-    }
-
-    #[inline]
-    pub fn is_ignore(&self) -> bool {
-        self.by_key(sym::ignore).exists()
-    }
-
-    #[inline]
-    pub fn is_bench(&self) -> bool {
-        self.by_key(sym::bench).exists()
-    }
-
-    #[inline]
-    pub fn is_unstable(&self) -> bool {
-        self.by_key(sym::unstable).exists()
-    }
-
-    #[inline]
-    pub fn rustc_legacy_const_generics(&self) -> Option<Box<Box<[u32]>>> {
-        self.by_key(sym::rustc_legacy_const_generics)
-            .tt_values()
-            .next()
-            .map(parse_rustc_legacy_const_generics)
-            .filter(|it| !it.is_empty())
-            .map(Box::new)
-    }
-
-    #[inline]
-    pub fn repr(&self) -> Option<ReprOptions> {
-        self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| {
-            acc.map_or(Some(repr), |mut acc| {
-                merge_repr(&mut acc, repr);
-                Some(acc)
-            })
-        })
-    }
-}
-
-fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
-    let mut indices = Vec::new();
-    let mut iter = tt.iter();
-    while let (Some(first), second) = (iter.next(), iter.next()) {
-        match first {
-            TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
-                Ok(index) => indices.push(index),
-                Err(_) => break,
-            },
-            _ => break,
-        }
-
-        if let Some(comma) = second {
-            match comma {
-                TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
-                _ => break,
-            }
-        }
-    }
-
-    indices.into_boxed_slice()
-}
-
-fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
-    let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
-    flags.insert(other.flags);
-    *align = (*align).max(other.align);
-    *pack = match (*pack, other.pack) {
-        (Some(pack), None) | (None, Some(pack)) => Some(pack),
-        _ => (*pack).min(other.pack),
-    };
-    if other.int.is_some() {
-        *int = other.int;
-    }
-}
-
-fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
-    use crate::builtin_type::{BuiltinInt, BuiltinUint};
-    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
-
-    match tt.top_subtree().delimiter {
-        tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
-        _ => return None,
-    }
-
-    let mut acc = ReprOptions::default();
-    let mut tts = tt.iter();
-    while let Some(tt) = tts.next() {
-        let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else {
-            continue;
-        };
-        let repr = match &ident.sym {
-            s if *s == sym::packed => {
-                let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
-                    tts.next();
-                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
-                        lit.symbol.as_str().parse().unwrap_or_default()
-                    } else {
-                        0
-                    }
-                } else {
-                    0
-                };
-                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
-                ReprOptions { pack, ..Default::default() }
-            }
-            s if *s == sym::align => {
-                let mut align = None;
-                if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
-                    tts.next();
-                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next()
-                        && let Ok(a) = lit.symbol.as_str().parse()
-                    {
-                        align = Align::from_bytes(a).ok();
-                    }
-                }
-                ReprOptions { align, ..Default::default() }
-            }
-            s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
-            s if *s == sym::transparent => {
-                ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }
-            }
-            s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
-            repr => {
-                let mut int = None;
-                if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
-                    .map(Either::Left)
-                    .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
-                {
-                    int = Some(match builtin {
-                        Either::Left(bi) => match bi {
-                            BuiltinInt::Isize => IntegerType::Pointer(true),
-                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
-                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
-                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
-                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
-                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
-                        },
-                        Either::Right(bu) => match bu {
-                            BuiltinUint::Usize => IntegerType::Pointer(false),
-                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
-                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
-                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
-                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
-                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
-                        },
-                    });
-                }
-                ReprOptions { int, ..Default::default() }
-            }
-        };
-        merge_repr(&mut acc, repr);
-    }
-
-    Some(acc)
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum DocAtom {
-    /// eg. `#[doc(hidden)]`
-    Flag(Symbol),
-    /// eg. `#[doc(alias = "it")]`
-    ///
-    /// Note that a key can have multiple values that are all considered "active" at the same time.
-    /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
-    KeyValue { key: Symbol, value: Symbol },
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum DocExpr {
-    Invalid,
-    /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]`
-    Atom(DocAtom),
-    /// eg. `#[doc(alias("x", "y"))]`
-    Alias(Vec<Symbol>),
-}
-
-impl From<DocAtom> for DocExpr {
-    fn from(atom: DocAtom) -> Self {
-        DocExpr::Atom(atom)
-    }
-}
-
-impl DocExpr {
-    fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> DocExpr {
-        next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid)
-    }
-
-    pub fn aliases(&self) -> &[Symbol] {
-        match self {
-            DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => {
-                std::slice::from_ref(value)
-            }
-            DocExpr::Alias(aliases) => aliases,
-            _ => &[],
-        }
-    }
-}
-
-fn next_doc_expr<S: Copy>(mut it: TtIter<'_, S>) -> Option<DocExpr> {
-    let name = match it.next() {
-        None => return None,
-        Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
-        Some(_) => return Some(DocExpr::Invalid),
-    };
-
-    // Peek
-    let ret = match it.peek() {
-        Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
-            it.next();
-            match it.next() {
-                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
-                    symbol: text,
-                    kind: tt::LitKind::Str,
-                    ..
-                }))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(),
-                _ => return Some(DocExpr::Invalid),
-            }
-        }
-        Some(TtElement::Subtree(_, subtree_iter)) => {
-            it.next();
-            let subs = parse_comma_sep(subtree_iter);
-            match &name {
-                s if *s == sym::alias => DocExpr::Alias(subs),
-                _ => DocExpr::Invalid,
-            }
-        }
-        _ => DocAtom::Flag(name).into(),
-    };
-    Some(ret)
-}
-
-fn parse_comma_sep<S>(iter: TtIter<'_, S>) -> Vec<Symbol> {
-    iter.filter_map(|tt| match tt {
-        TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
-            kind: tt::LitKind::Str, symbol, ..
-        })) => Some(symbol.clone()),
-        _ => None,
-    })
-    .collect()
-}
-
-impl AttrsWithOwner {
-    pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self {
-        Self { attrs: db.attrs(owner), owner }
-    }
-
-    pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
-        let _p = tracing::info_span!("attrs_query").entered();
-        // FIXME: this should use `Trace` to avoid duplication in `source_map` below
-        match def {
-            AttrDefId::ModuleId(module) => {
-                let def_map = module.def_map(db);
-                let mod_data = &def_map[module.local_id];
-
-                let raw_attrs = match mod_data.origin {
-                    ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => {
-                        let decl_attrs = declaration_tree_id
-                            .item_tree(db)
-                            .raw_attrs(declaration.upcast())
-                            .clone();
-                        let tree = db.file_item_tree(definition.into());
-                        let def_attrs = tree.top_level_raw_attrs().clone();
-                        decl_attrs.merge(def_attrs)
-                    }
-                    ModuleOrigin::CrateRoot { definition } => {
-                        let tree = db.file_item_tree(definition.into());
-                        tree.top_level_raw_attrs().clone()
-                    }
-                    ModuleOrigin::Inline { definition_tree_id, definition } => {
-                        definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone()
-                    }
-                    ModuleOrigin::BlockExpr { id, .. } => {
-                        let tree = block_item_tree_query(db, id);
-                        tree.top_level_raw_attrs().clone()
-                    }
-                };
-                Attrs::expand_cfg_attr(db, module.krate, raw_attrs)
-            }
-            AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(),
-            AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::AdtId(it) => match it {
-                AdtId::StructId(it) => attrs_from_ast_id_loc(db, it),
-                AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it),
-                AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it),
-            },
-            AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::MacroId(it) => match it {
-                MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it),
-                MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it),
-                MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it),
-            },
-            AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::GenericParamId(it) => match it {
-                GenericParamId::ConstParamId(it) => {
-                    let src = it.parent().child_source(db);
-                    // FIXME: We should be never getting `None` here.
-                    Attrs(match src.value.get(it.local_id()) {
-                        Some(val) => RawAttrs::new_expanded(
-                            db,
-                            val,
-                            db.span_map(src.file_id).as_ref(),
-                            def.krate(db).cfg_options(db),
-                        ),
-                        None => RawAttrs::EMPTY,
-                    })
-                }
-                GenericParamId::TypeParamId(it) => {
-                    let src = it.parent().child_source(db);
-                    // FIXME: We should be never getting `None` here.
-                    Attrs(match src.value.get(it.local_id()) {
-                        Some(val) => RawAttrs::new_expanded(
-                            db,
-                            val,
-                            db.span_map(src.file_id).as_ref(),
-                            def.krate(db).cfg_options(db),
-                        ),
-                        None => RawAttrs::EMPTY,
-                    })
-                }
-                GenericParamId::LifetimeParamId(it) => {
-                    let src = it.parent.child_source(db);
-                    // FIXME: We should be never getting `None` here.
-                    Attrs(match src.value.get(it.local_id) {
-                        Some(val) => RawAttrs::new_expanded(
-                            db,
-                            val,
-                            db.span_map(src.file_id).as_ref(),
-                            def.krate(db).cfg_options(db),
-                        ),
-                        None => RawAttrs::EMPTY,
-                    })
-                }
-            },
-            AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
-            AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
-        }
-    }
-
-    pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
-        let owner = match self.owner {
-            AttrDefId::ModuleId(module) => {
-                // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
-
-                let def_map = module.def_map(db);
-                let mod_data = &def_map[module.local_id];
-                match mod_data.declaration_source(db) {
-                    Some(it) => {
-                        let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
-                        if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
-                            mod_data.definition_source(db)
-                        {
-                            map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
-                                file_id, &file,
-                            )));
-                        }
-                        return map;
-                    }
-                    None => {
-                        let InFile { file_id, value } = mod_data.definition_source(db);
-                        let attrs_owner = match &value {
-                            ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
-                            ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
-                            ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
-                        };
-                        return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
-                    }
-                }
-            }
-            AttrDefId::FieldId(id) => {
-                let map = db.fields_attrs_source_map(id.parent);
-                let file_id = id.parent.file_id(db);
-                let root = db.parse_or_expand(file_id);
-                let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
-                InFile::new(file_id, owner)
-            }
-            AttrDefId::AdtId(adt) => match adt {
-                AdtId::StructId(id) => any_has_attrs(db, id),
-                AdtId::UnionId(id) => any_has_attrs(db, id),
-                AdtId::EnumId(id) => any_has_attrs(db, id),
-            },
-            AttrDefId::FunctionId(id) => any_has_attrs(db, id),
-            AttrDefId::EnumVariantId(id) => any_has_attrs(db, id),
-            AttrDefId::StaticId(id) => any_has_attrs(db, id),
-            AttrDefId::ConstId(id) => any_has_attrs(db, id),
-            AttrDefId::TraitId(id) => any_has_attrs(db, id),
-            AttrDefId::TypeAliasId(id) => any_has_attrs(db, id),
-            AttrDefId::MacroId(id) => match id {
-                MacroId::Macro2Id(id) => any_has_attrs(db, id),
-                MacroId::MacroRulesId(id) => any_has_attrs(db, id),
-                MacroId::ProcMacroId(id) => any_has_attrs(db, id),
-            },
-            AttrDefId::ImplId(id) => any_has_attrs(db, id),
-            AttrDefId::GenericParamId(id) => match id {
-                GenericParamId::ConstParamId(id) => id
-                    .parent()
-                    .child_source(db)
-                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
-                GenericParamId::TypeParamId(id) => id
-                    .parent()
-                    .child_source(db)
-                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
-                GenericParamId::LifetimeParamId(id) => id
-                    .parent
-                    .child_source(db)
-                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
-            },
-            AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
-            AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
-            AttrDefId::UseId(id) => any_has_attrs(db, id),
-        };
-
-        AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
-    }
-}
-
-#[derive(Debug)]
-pub struct AttrSourceMap {
-    source: Vec<Either<ast::Attr, ast::Comment>>,
-    file_id: HirFileId,
-    /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
-    /// while `file_id` will be the one of the module declaration site.
-    /// The usize is the index into `source` from which point on the entries reside in the def site
-    /// file.
-    mod_def_site_file_id: Option<(HirFileId, usize)>,
-}
-
-impl AttrSourceMap {
-    fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
-        Self {
-            source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
-            file_id: owner.file_id,
-            mod_def_site_file_id: None,
-        }
-    }
-
-    /// Append a second source map to this one, this is required for modules, whose outline and inline
-    /// attributes can reside in different files
-    fn append_module_inline_attrs(&mut self, other: Self) {
-        assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
-        let len = self.source.len();
-        self.source.extend(other.source);
-        if other.file_id != self.file_id {
-            self.mod_def_site_file_id = Some((other.file_id, len));
-        }
-    }
-
-    /// Maps the lowered `Attr` back to its original syntax node.
-    ///
-    /// `attr` must come from the `owner` used for AttrSourceMap
-    ///
-    /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
-    /// the attribute represented by `Attr`.
-    pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
-        self.source_of_id(attr.id)
-    }
-
-    pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
-        let ast_idx = id.ast_index();
-        let file_id = match self.mod_def_site_file_id {
-            Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
-            _ => self.file_id,
-        };
-
-        self.source
-            .get(ast_idx)
-            .map(|it| InFile::new(file_id, it))
-            .unwrap_or_else(|| panic!("cannot find attr at index {id:?}"))
-    }
-}
-
-#[derive(Debug, Clone)]
-pub struct AttrQuery<'attr> {
-    attrs: &'attr Attrs,
-    key: Symbol,
-}
-
-impl<'attr> AttrQuery<'attr> {
-    #[inline]
-    pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
-        self.attrs().filter_map(|attr| attr.token_tree_value())
-    }
-
-    #[inline]
-    pub fn string_value(self) -> Option<&'attr Symbol> {
-        self.attrs().find_map(|attr| attr.string_value())
-    }
-
-    #[inline]
-    pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
-        self.attrs().find_map(|attr| attr.string_value_with_span())
-    }
-
-    #[inline]
-    pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
-        self.attrs().find_map(|attr| attr.string_value_unescape())
-    }
-
-    #[inline]
-    pub fn exists(self) -> bool {
-        self.attrs().next().is_some()
-    }
-
-    #[inline]
-    pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
-        let key = self.key;
-        self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
-    }
-
-    /// Find string value for a specific key inside token tree
-    ///
-    /// ```ignore
-    /// #[doc(html_root_url = "url")]
-    ///       ^^^^^^^^^^^^^ key
-    /// ```
-    #[inline]
-    pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> {
-        self.tt_values().find_map(|tt| {
-            let name = tt.iter()
-                .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key))
-                .nth(2);
-
-            match name {
-                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal{  symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()),
-                _ => None
-            }
-        })
-    }
-}
-
-fn any_has_attrs<'db>(
-    db: &(dyn DefDatabase + 'db),
-    id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
-) -> InFile<ast::AnyHasAttrs> {
-    id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
-}
-
-fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>(
-    db: &(dyn DefDatabase + 'db),
-    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
-) -> Attrs {
-    let loc = lookup.lookup(db);
-    let source = loc.source(db);
-    let span_map = db.span_map(source.file_id);
-    let cfg_options = loc.krate(db).cfg_options(db);
-    Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options))
-}
-
-pub(crate) fn fields_attrs_source_map(
-    db: &dyn DefDatabase,
-    def: VariantId,
-) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
-    let mut res = ArenaMap::default();
-    let child_source = def.child_source(db);
-
-    for (idx, variant) in child_source.value.iter() {
-        res.insert(
-            idx,
-            variant
-                .as_ref()
-                .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
-        );
-    }
-
-    Arc::new(res)
-}
-
-#[cfg(test)]
-mod tests {
-    //! This module contains tests for doc-expression parsing.
-    //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
-
-    use intern::Symbol;
-    use span::EditionedFileId;
-    use triomphe::Arc;
-
-    use hir_expand::span_map::{RealSpanMap, SpanMap};
-    use span::FileId;
-    use syntax::{AstNode, TextRange, ast};
-    use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
-
-    use crate::attr::{DocAtom, DocExpr};
-
-    fn assert_parse_result(input: &str, expected: DocExpr) {
-        let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
-            EditionedFileId::current_edition(FileId::from_raw(0)),
-        )));
-        let tt = syntax_node_to_token_tree(
-            tt.syntax(),
-            map.as_ref(),
-            map.span_for_range(TextRange::empty(0.into())),
-            DocCommentDesugarMode::ProcMacro,
-        );
-        let cfg = DocExpr::parse(&tt);
-        assert_eq!(cfg, expected);
-    }
-
-    #[test]
-    fn test_doc_expr_parser() {
-        assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into());
-
-        assert_parse_result(
-            r#"#![doc(alias = "foo")]"#,
-            DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(),
-        );
-
-        assert_parse_result(
-            r#"#![doc(alias("foo"))]"#,
-            DocExpr::Alias([Symbol::intern("foo")].into()),
-        );
-        assert_parse_result(
-            r#"#![doc(alias("foo", "bar", "baz"))]"#,
-            DocExpr::Alias(
-                [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(),
-            ),
-        );
-
-        assert_parse_result(
-            r#"
-        #[doc(alias("Bar", "Qux"))]
-        struct Foo;"#,
-            DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()),
-        );
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs
new file mode 100644
index 0000000..ab36b70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs
@@ -0,0 +1,1618 @@
+//! Attributes for anything that is not name resolution.
+//!
+//! The fundamental idea of this module stems from the observation that most "interesting"
+//! attributes have a more memory-compact representation than their full syntax, that most
+//! attributes are flags, and that those that are not are rare. Therefore, this module defines
+//! [`AttrFlags`], a bitflags type that records only a yes/no answer to whether an attribute is
+//! present on an item. For most attributes, that is all we are interested in; for the rest, we
+//! define another query that extracts their data. A key part is that every one of those queries
+//! has a wrapper method that queries (or is given) the `AttrFlags` and checks for the presence
+//! of the attribute; if it is not present, we do not call the query, to prevent Salsa from
+//! needing to record its value. This way, queries are only called on items that actually have
+//! the attribute, which are usually few.
+//!
+//! An exception to this model, also defined in this module, is documentation (doc comments
+//! and `#[doc = "..."]` attributes). It too has a more compact form than the attributes
+//! themselves: a concatenated string of the full docs, plus a source map that maps it back
+//! to the AST (which is needed for things like resolving links in doc comments and highlight
+//! injection). The lowering and upmapping of doc comments is a bit involved,
+//! but it is encapsulated in the [`Docs`] struct.
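+//!
+//! As a rough usage sketch of this pattern (illustrative only; `db` and `item` stand in for a
+//! real `&dyn DefDatabase` and `AttrDefId`):
+//!
+//! ```ignore
+//! // `AttrFlags::lang_item` first runs the cheap bitflags query; only when the `LANG_ITEM`
+//! // flag is set does it invoke the heavier tracked query that extracts the lang item name,
+//! // so nothing is recorded for the (vast majority of) items without the attribute.
+//! let lang = AttrFlags::lang_item(db, item);
+//! ```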
+
+use std::{
+    convert::Infallible,
+    iter::Peekable,
+    ops::{ControlFlow, Range},
+};
+
+use base_db::Crate;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+    HirFileId, InFile, Lookup,
+    attrs::{Meta, expand_cfg_attr, expand_cfg_attr_with_doc_comments},
+};
+use intern::Symbol;
+use itertools::Itertools;
+use la_arena::ArenaMap;
+use rustc_abi::ReprOptions;
+use rustc_hash::FxHashSet;
+use smallvec::SmallVec;
+use syntax::{
+    AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T,
+    ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren},
+};
+use tt::{TextRange, TextSize};
+
+use crate::{
+    AdtId, AstIdLoc, AttrDefId, FieldId, FunctionId, GenericDefId, HasModule, InternedModuleId,
+    LifetimeParamId, LocalFieldId, MacroId, TypeOrConstParamId, VariantId,
+    db::DefDatabase,
+    hir::generics::{GenericParams, LocalLifetimeParamId, LocalTypeOrConstParamId},
+    nameres::ModuleOrigin,
+    src::{HasChildSource, HasSource},
+};
+
+#[inline]
+fn attrs_from_ast_id_loc<N: AstNode + Into<ast::AnyHasAttrs>>(
+    db: &dyn DefDatabase,
+    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
+) -> (InFile<ast::AnyHasAttrs>, Crate) {
+    let loc = lookup.lookup(db);
+    let source = loc.source(db);
+    let krate = loc.krate(db);
+    (source.map(|it| it.into()), krate)
+}
+
+#[inline]
+fn extract_doc_tt_attr(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+    for atom in DocAtom::parse(tt) {
+        match atom {
+            DocAtom::Flag(flag) => match &*flag {
+                "notable_trait" => attr_flags.insert(AttrFlags::IS_DOC_NOTABLE_TRAIT),
+                "hidden" => attr_flags.insert(AttrFlags::IS_DOC_HIDDEN),
+                _ => {}
+            },
+            DocAtom::KeyValue { key, value: _ } => match &*key {
+                "alias" => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES),
+                "keyword" => attr_flags.insert(AttrFlags::HAS_DOC_KEYWORD),
+                _ => {}
+            },
+            DocAtom::Alias(_) => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES),
+        }
+    }
+}
+
+fn extract_ra_completions(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+    let tt = TokenTreeChildren::new(&tt);
+    if let Ok(NodeOrToken::Token(option)) = Itertools::exactly_one(tt)
+        && option.kind().is_any_identifier()
+    {
+        match option.text() {
+            "ignore_flyimport" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT),
+            "ignore_methods" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_METHODS),
+            "ignore_flyimport_methods" => {
+                attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS)
+            }
+            _ => {}
+        }
+    }
+}
+
+fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
+    let iter = TokenTreeChildren::new(&tt);
+    for kind in iter {
+        if let NodeOrToken::Token(kind) = kind
+            && kind.kind().is_any_identifier()
+        {
+            match kind.text() {
+                "array" => attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH),
+                "boxed_slice" => {
+                    attr_flags.insert(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH)
+                }
+                _ => {}
+            }
+        }
+    }
+}
+
+#[inline]
+fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infallible> {
+    match attr {
+        Meta::NamedKeyValue { name: Some(name), value, .. } => match name.text() {
+            "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+            "lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
+            "path" => attr_flags.insert(AttrFlags::HAS_PATH),
+            "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
+            "export_name" => {
+                if let Some(value) = value
+                    && let Some(value) = ast::String::cast(value)
+                    && let Ok(value) = value.value()
+                    && *value == *"main"
+                {
+                    attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN);
+                }
+            }
+            _ => {}
+        },
+        Meta::TokenTree { path, tt } => match path.segments.len() {
+            1 => match path.segments[0].text() {
+                "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+                "cfg" => attr_flags.insert(AttrFlags::HAS_CFG),
+                "doc" => extract_doc_tt_attr(attr_flags, tt),
+                "repr" => attr_flags.insert(AttrFlags::HAS_REPR),
+                "target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE),
+                "proc_macro_derive" | "rustc_builtin_macro" => {
+                    attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO)
+                }
+                "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
+                "rustc_layout_scalar_valid_range_start" | "rustc_layout_scalar_valid_range_end" => {
+                    attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
+                }
+                "rustc_legacy_const_generics" => {
+                    attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS)
+                }
+                "rustc_skip_during_method_dispatch" => {
+                    extract_rustc_skip_during_method_dispatch(attr_flags, tt)
+                }
+                _ => {}
+            },
+            2 => match path.segments[0].text() {
+                "rust_analyzer" => match path.segments[1].text() {
+                    "completions" => extract_ra_completions(attr_flags, tt),
+                    _ => {}
+                },
+                _ => {}
+            },
+            _ => {}
+        },
+        Meta::Path { path } => {
+            match path.segments.len() {
+                1 => match path.segments[0].text() {
+                    "rustc_has_incoherent_inherent_impls" => {
+                        attr_flags.insert(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
+                    }
+                    "rustc_allow_incoherent_impl" => {
+                        attr_flags.insert(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+                    }
+                    "fundamental" => attr_flags.insert(AttrFlags::FUNDAMENTAL),
+                    "no_std" => attr_flags.insert(AttrFlags::IS_NO_STD),
+                    "may_dangle" => attr_flags.insert(AttrFlags::MAY_DANGLE),
+                    "rustc_paren_sugar" => attr_flags.insert(AttrFlags::RUSTC_PAREN_SUGAR),
+                    "rustc_coinductive" => attr_flags.insert(AttrFlags::RUSTC_COINDUCTIVE),
+                    "rustc_force_inline" => attr_flags.insert(AttrFlags::RUSTC_FORCE_INLINE),
+                    "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
+                    "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
+                    "macro_export" => attr_flags.insert(AttrFlags::IS_MACRO_EXPORT),
+                    "no_mangle" => attr_flags.insert(AttrFlags::NO_MANGLE),
+                    "non_exhaustive" => attr_flags.insert(AttrFlags::NON_EXHAUSTIVE),
+                    "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
+                    "bench" => attr_flags.insert(AttrFlags::IS_BENCH),
+                    "rustc_const_panic_str" => attr_flags.insert(AttrFlags::RUSTC_CONST_PANIC_STR),
+                    "rustc_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_INTRINSIC),
+                    "rustc_safe_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_SAFE_INTRINSIC),
+                    "rustc_intrinsic_must_be_overridden" => {
+                        attr_flags.insert(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN)
+                    }
+                    "rustc_allocator" => attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR),
+                    "rustc_deallocator" => attr_flags.insert(AttrFlags::RUSTC_DEALLOCATOR),
+                    "rustc_reallocator" => attr_flags.insert(AttrFlags::RUSTC_REALLOCATOR),
+                    "rustc_allocator_zeroed" => {
+                        attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR_ZEROED)
+                    }
+                    "rustc_reservation_impl" => {
+                        attr_flags.insert(AttrFlags::RUSTC_RESERVATION_IMPL)
+                    }
+                    "rustc_deprecated_safe_2024" => {
+                        attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024)
+                    }
+                    "rustc_skip_array_during_method_dispatch" => {
+                        attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH)
+                    }
+                    _ => {}
+                },
+                2 => match path.segments[0].text() {
+                    "rust_analyzer" => match path.segments[1].text() {
+                        "skip" => attr_flags.insert(AttrFlags::RUST_ANALYZER_SKIP),
+                        _ => {}
+                    },
+                    _ => {}
+                },
+                _ => {}
+            }
+
+            if path.is_test {
+                attr_flags.insert(AttrFlags::IS_TEST);
+            }
+        }
+        _ => {}
+    };
+    ControlFlow::Continue(())
+}
+
+bitflags::bitflags! {
+    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+    pub struct AttrFlags: u64 {
+        const RUST_ANALYZER_SKIP = 1 << 0;
+
+        const LANG_ITEM = 1 << 1;
+
+        const HAS_DOC_ALIASES = 1 << 2;
+        const HAS_DOC_KEYWORD = 1 << 3;
+        const IS_DOC_NOTABLE_TRAIT = 1 << 4;
+        const IS_DOC_HIDDEN = 1 << 5;
+
+        const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 6;
+        const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
+        const FUNDAMENTAL = 1 << 8;
+        const RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 9;
+        const RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 10;
+        const HAS_REPR = 1 << 11;
+        const HAS_TARGET_FEATURE = 1 << 12;
+        const RUSTC_DEPRECATED_SAFE_2024 = 1 << 13;
+        const HAS_LEGACY_CONST_GENERICS = 1 << 14;
+        const NO_MANGLE = 1 << 15;
+        const NON_EXHAUSTIVE = 1 << 16;
+        const RUSTC_RESERVATION_IMPL = 1 << 17;
+        const RUSTC_CONST_PANIC_STR = 1 << 18;
+        const MAY_DANGLE = 1 << 19;
+
+        const RUSTC_INTRINSIC = 1 << 20;
+        const RUSTC_SAFE_INTRINSIC = 1 << 21;
+        const RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN = 1 << 22;
+        const RUSTC_ALLOCATOR = 1 << 23;
+        const RUSTC_DEALLOCATOR = 1 << 24;
+        const RUSTC_REALLOCATOR = 1 << 25;
+        const RUSTC_ALLOCATOR_ZEROED = 1 << 26;
+
+        const IS_UNSTABLE = 1 << 27;
+        const IS_IGNORE = 1 << 28;
+        // FIXME: `IS_TEST` and `IS_BENCH` should be based on semantic information, not textual match.
+        const IS_BENCH = 1 << 29;
+        const IS_TEST = 1 << 30;
+        const IS_EXPORT_NAME_MAIN = 1 << 31;
+        const IS_MACRO_EXPORT = 1 << 32;
+        const IS_NO_STD = 1 << 33;
+        const IS_DERIVE_OR_BUILTIN_MACRO = 1 << 34;
+        const IS_DEPRECATED = 1 << 35;
+        const HAS_PATH = 1 << 36;
+        const HAS_CFG = 1 << 37;
+
+        const COMPLETE_IGNORE_FLYIMPORT = 1 << 38;
+        const COMPLETE_IGNORE_FLYIMPORT_METHODS = 1 << 39;
+        const COMPLETE_IGNORE_METHODS = 1 << 40;
+
+        const RUSTC_LAYOUT_SCALAR_VALID_RANGE = 1 << 41;
+        const RUSTC_PAREN_SUGAR = 1 << 42;
+        const RUSTC_COINDUCTIVE = 1 << 43;
+        const RUSTC_FORCE_INLINE = 1 << 44;
+    }
+}
+
+fn attrs_source(
+    db: &dyn DefDatabase,
+    owner: AttrDefId,
+) -> (InFile<ast::AnyHasAttrs>, Option<InFile<ast::Module>>, Crate) {
+    let (owner, krate) = match owner {
+        AttrDefId::ModuleId(id) => {
+            let id = id.loc(db);
+            let def_map = id.def_map(db);
+            let (definition, declaration) = match def_map[id.local_id].origin {
+                ModuleOrigin::CrateRoot { definition } => {
+                    let file = db.parse(definition).tree();
+                    (InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None)
+                }
+                ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => {
+                    let declaration = InFile::new(declaration_tree_id.file_id(), declaration);
+                    let declaration = declaration.with_value(declaration.to_node(db));
+                    let definition_source = db.parse(definition).tree();
+                    (InFile::new(definition.into(), definition_source.into()), Some(declaration))
+                }
+                ModuleOrigin::Inline { definition_tree_id, definition } => {
+                    let definition = InFile::new(definition_tree_id.file_id(), definition);
+                    let definition = definition.with_value(definition.to_node(db).into());
+                    (definition, None)
+                }
+                ModuleOrigin::BlockExpr { block, .. } => {
+                    let definition = block.to_node(db);
+                    (block.with_value(definition.into()), None)
+                }
+            };
+            return (definition, declaration, id.krate);
+        }
+        AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::AdtId(AdtId::EnumId(it)) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::MacroId(MacroId::MacroRulesId(it)) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::MacroId(MacroId::Macro2Id(it)) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::MacroId(MacroId::ProcMacroId(it)) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
+        AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
+    };
+    (owner, None, krate)
+}
+
+fn collect_attrs<BreakValue>(
+    db: &dyn DefDatabase,
+    owner: AttrDefId,
+    mut callback: impl FnMut(Meta) -> ControlFlow<BreakValue>,
+) -> Option<BreakValue> {
+    let (source, outer_mod_decl, krate) = attrs_source(db, owner);
+
+    let mut cfg_options = None;
+    expand_cfg_attr(
+        outer_mod_decl
+            .into_iter()
+            .flat_map(|it| it.value.attrs())
+            .chain(ast::attrs_including_inner(&source.value)),
+        || cfg_options.get_or_insert_with(|| krate.cfg_options(db)),
+        move |meta, _, _, _| callback(meta),
+    )
+}
+
+fn collect_field_attrs<T>(
+    db: &dyn DefDatabase,
+    variant: VariantId,
+    mut field_attrs: impl FnMut(&CfgOptions, InFile<ast::AnyHasAttrs>) -> T,
+) -> ArenaMap<LocalFieldId, T> {
+    let (variant_syntax, krate) = match variant {
+        VariantId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
+        VariantId::StructId(it) => attrs_from_ast_id_loc(db, it),
+        VariantId::UnionId(it) => attrs_from_ast_id_loc(db, it),
+    };
+    let cfg_options = krate.cfg_options(db);
+    let variant_syntax = variant_syntax
+        .with_value(ast::VariantDef::cast(variant_syntax.value.syntax().clone()).unwrap());
+    let fields = match &variant_syntax.value {
+        ast::VariantDef::Struct(it) => it.field_list(),
+        ast::VariantDef::Union(it) => it.record_field_list().map(ast::FieldList::RecordFieldList),
+        ast::VariantDef::Variant(it) => it.field_list(),
+    };
+    let Some(fields) = fields else {
+        return ArenaMap::new();
+    };
+
+    let mut result = ArenaMap::new();
+    let mut idx = 0;
+    match fields {
+        ast::FieldList::RecordFieldList(fields) => {
+            for field in fields.fields() {
+                if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() {
+                    result.insert(
+                        la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)),
+                        field_attrs(cfg_options, variant_syntax.with_value(field.into())),
+                    );
+                    idx += 1;
+                }
+            }
+        }
+        ast::FieldList::TupleFieldList(fields) => {
+            for field in fields.fields() {
+                if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() {
+                    result.insert(
+                        la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)),
+                        field_attrs(cfg_options, variant_syntax.with_value(field.into())),
+                    );
+                    idx += 1;
+                }
+            }
+        }
+    }
+    result.shrink_to_fit();
+    result
+}
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RustcLayoutScalarValidRange {
+    pub start: Option<u128>,
+    pub end: Option<u128>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+struct DocsSourceMapLine {
+    /// The offset in [`Docs::docs`].
+    string_offset: TextSize,
+    /// The offset in the AST of the text.
+    ast_offset: TextSize,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Docs {
+    /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments.
+    docs: String,
+    /// A sorted map from an offset in `docs` to an offset in the source code.
+    docs_source_map: Vec<DocsSourceMapLine>,
+    /// If the item is an outlined module (`mod foo;`), `docs_source_map` stores the concatenated
+    /// list of the outline and inline docs (outline first). In that case, this field contains the
+    /// [`HirFileId`] of the outline declaration, and the index in `docs_source_map` from which the
+    /// inline docs begin.
+    outline_mod: Option<(HirFileId, usize)>,
+    inline_file: HirFileId,
+    /// The size of the prepended prefix, which does not map to real doc comments.
+    prefix_len: TextSize,
+    /// The offset in `docs` from which the docs are inner attributes/comments.
+    inline_inner_docs_start: Option<TextSize>,
+    /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs`
+    /// (as outline modules don't have inner attributes).
+    outline_inner_docs_start: Option<TextSize>,
+}
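+
+// As a rough illustration of the layout above (assumed content, not taken from a real file):
+// for an item documented with `/// foo` and `/// bar`, `docs` ends up holding "foo\nbar", and
+// `docs_source_map` holds one `DocsSourceMapLine` per doc line, whose `string_offset` points at
+// the start of that line inside `docs` and whose `ast_offset` points at the corresponding text
+// inside the doc comment token in the source file.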
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum IsInnerDoc {
+    No,
+    Yes,
+}
+
+impl IsInnerDoc {
+    #[inline]
+    pub fn yes(self) -> bool {
+        self == IsInnerDoc::Yes
+    }
+}
+
+impl Docs {
+    #[inline]
+    pub fn docs(&self) -> &str {
+        &self.docs
+    }
+
+    #[inline]
+    pub fn into_docs(self) -> String {
+        self.docs
+    }
+
+    pub fn find_ast_range(
+        &self,
+        mut string_range: TextRange,
+    ) -> Option<(InFile<TextRange>, IsInnerDoc)> {
+        if string_range.start() < self.prefix_len {
+            return None;
+        }
+        string_range -= self.prefix_len;
+
+        let mut file = self.inline_file;
+        let mut inner_docs_start = self.inline_inner_docs_start;
+        // Check whether the range is from the outline, the inline, or both.
+        let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod {
+            if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) {
+                if string_range.end() <= first_inline.string_offset {
+                    // The range is completely in the outline.
+                    file = outline_mod_file;
+                    inner_docs_start = self.outline_inner_docs_start;
+                    &self.docs_source_map[..outline_mod_end]
+                } else if string_range.start() >= first_inline.string_offset {
+                    // The range is completely in the inline.
+                    &self.docs_source_map[outline_mod_end..]
+                } else {
+                    // The range is combined from the outline and the inline - cannot map it back.
+                    return None;
+                }
+            } else {
+                // There is no inline.
+                file = outline_mod_file;
+                inner_docs_start = self.outline_inner_docs_start;
+                &self.docs_source_map
+            }
+        } else {
+            // There is no outline.
+            &self.docs_source_map
+        };
+
+        let after_range =
+            source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1;
+        let after_range = &source_map[after_range..];
+        let line = after_range.first()?;
+        if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end())
+        {
+            // The range is combined from two lines - cannot map it back.
+            return None;
+        }
+        let ast_range = string_range - line.string_offset + line.ast_offset;
+        let is_inner = if inner_docs_start
+            .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start)
+        {
+            IsInnerDoc::Yes
+        } else {
+            IsInnerDoc::No
+        };
+        Some((InFile::new(file, ast_range), is_inner))
+    }
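+
+    // Rough usage sketch (hypothetical `range_in_docs` value, not real API output): a range inside
+    // the docs string maps back to the originating file and syntax range, unless it straddles two
+    // source lines or mixes outline and inline docs:
+    //
+    //     if let Some((ast_range, is_inner)) = docs.find_ast_range(range_in_docs) {
+    //         // `ast_range.file_id` is the file the text came from; `ast_range.value` is the
+    //         // range inside the doc comment or `#[doc = "..."]` attribute.
+    //     }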
+
+    #[inline]
+    pub fn shift_by(&mut self, offset: TextSize) {
+        self.prefix_len += offset;
+    }
+
+    pub fn prepend_str(&mut self, s: &str) {
+        self.prefix_len += TextSize::of(s);
+        self.docs.insert_str(0, s);
+    }
+
+    pub fn append_str(&mut self, s: &str) {
+        self.docs.push_str(s);
+    }
+
+    pub fn append(&mut self, other: &Docs) {
+        let other_offset = TextSize::of(&self.docs);
+
+        assert!(
+            self.outline_mod.is_none() && other.outline_mod.is_none(),
+            "cannot merge `Docs` that have `outline_mod` set"
+        );
+        self.outline_mod = Some((self.inline_file, self.docs_source_map.len()));
+        self.inline_file = other.inline_file;
+        self.outline_inner_docs_start = self.inline_inner_docs_start;
+        self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset);
+
+        self.docs.push_str(&other.docs);
+        self.docs_source_map.extend(other.docs_source_map.iter().map(
+            |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine {
+                ast_offset,
+                string_offset: string_offset + other_offset,
+            },
+        ));
+    }
+
+    fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) {
+        let Some((doc, offset)) = comment.doc_comment() else { return };
+        self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent);
+    }
+
+    fn extend_with_doc_attr(&mut self, value: SyntaxToken, indent: &mut usize) {
+        let Some(value) = ast::String::cast(value) else { return };
+        let Some(value_offset) = value.text_range_between_quotes() else { return };
+        let value_offset = value_offset.start();
+        let Ok(value) = value.value() else { return };
+        // FIXME: Handle source maps for escaped text.
+        self.extend_with_doc_str(&value, value_offset, indent);
+    }
+
+    fn extend_with_doc_str(&mut self, doc: &str, mut offset_in_ast: TextSize, indent: &mut usize) {
+        for line in doc.split('\n') {
+            self.docs_source_map.push(DocsSourceMapLine {
+                string_offset: TextSize::of(&self.docs),
+                ast_offset: offset_in_ast,
+            });
+            offset_in_ast += TextSize::of(line) + TextSize::of("\n");
+
+            let line = line.trim_end();
+            if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) {
+                // Empty lines are handled because `position()` returns `None` for them.
+                *indent = std::cmp::min(*indent, line_indent);
+            }
+            self.docs.push_str(line);
+            self.docs.push('\n');
+        }
+    }
+
+    fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) {
+        /// In case of a panic partway through, we want to avoid leaving corrupted UTF-8 in `self.docs`, so we clear it.
+        struct Guard<'a>(&'a mut Docs);
+        impl Drop for Guard<'_> {
+            fn drop(&mut self) {
+                let Docs {
+                    docs,
+                    docs_source_map,
+                    outline_mod,
+                    inline_file: _,
+                    prefix_len: _,
+                    inline_inner_docs_start: _,
+                    outline_inner_docs_start: _,
+                } = self.0;
+                // Don't use `String::clear()` here because it is not guaranteed to avoid
+                // UTF-8-dependent operations, and we may have temporarily broken the string's encoding.
+                unsafe { docs.as_mut_vec() }.clear();
+                // This is just to avoid panics down the road.
+                docs_source_map.clear();
+                *outline_mod = None;
+            }
+        }
+
+        if self.docs.is_empty() {
+            return;
+        }
+
+        let guard = Guard(self);
+        let source_map = &mut guard.0.docs_source_map[start_source_map_index..];
+        let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first()
+        else {
+            return;
+        };
+        // We essentially want to remove multiple ranges from a string. Doing this efficiently
+        // (without O(N^2) work or allocations) requires unsafe: for each line, we copy the line
+        // minus its indent so that it immediately follows the previous (possibly already moved)
+        // line, and at the end we truncate the string.
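+        // For example (hypothetical input): with `indent == 2` and `docs` holding "  foo\n  bar\n",
+        // each line is copied left over its two leading spaces, `docs` is truncated to "foo\nbar\n",
+        // every line's `ast_offset` grows by 2, and the string offsets of later lines shrink by the
+        // indent accumulated from the lines before them.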
+        let mut accumulated_offset = TextSize::new(0);
+        for idx in 0..source_map.len() {
+            let string_end_offset = source_map
+                .get(idx + 1)
+                .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset);
+            let line_source = &mut source_map[idx];
+            let line_docs =
+                &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)];
+            let line_docs_len = TextSize::of(line_docs);
+            let indent_size = line_docs.char_indices().nth(indent).map_or_else(
+                || TextSize::of(line_docs) - TextSize::of("\n"),
+                |(offset, _)| TextSize::new(offset as u32),
+            );
+            unsafe { guard.0.docs.as_bytes_mut() }.copy_within(
+                Range::<usize>::from(TextRange::new(
+                    line_source.string_offset + indent_size,
+                    string_end_offset,
+                )),
+                copy_into.into(),
+            );
+            copy_into += line_docs_len - indent_size;
+
+            if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start
+                && *inner_attrs_start == line_source.string_offset
+            {
+                *inner_attrs_start -= accumulated_offset;
+            }
+            // The removals in the string accumulate, but not in the AST, because the AST offset
+            // already points to the beginning of each attribute.
+            // Also, we need to shift the AST offset of every line, while the string offset of the
+            // first line must not be shifted (in general, the string offset shifts by the indent
+            // accumulated from all lines before the current one, excluding the current one).
+            line_source.string_offset -= accumulated_offset;
+            line_source.ast_offset += indent_size;
+
+            accumulated_offset += indent_size;
+        }
+        // Don't use `String::truncate()` here because it is not guaranteed to avoid UTF-8-dependent
+        // operations, and we may have temporarily broken the string's encoding.
+        unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into());
+
+        std::mem::forget(guard);
+    }
+
+    fn remove_last_newline(&mut self) {
+        self.docs.truncate(self.docs.len().saturating_sub(1));
+    }
+
+    fn shrink_to_fit(&mut self) {
+        let Docs {
+            docs,
+            docs_source_map,
+            outline_mod: _,
+            inline_file: _,
+            prefix_len: _,
+            inline_inner_docs_start: _,
+            outline_inner_docs_start: _,
+        } = self;
+        docs.shrink_to_fit();
+        docs_source_map.shrink_to_fit();
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+pub struct DeriveInfo {
+    pub trait_name: Symbol,
+    pub helpers: Box<[Symbol]>,
+}
+
+fn extract_doc_aliases(result: &mut Vec<Symbol>, attr: Meta) -> ControlFlow<Infallible> {
+    if let Meta::TokenTree { path, tt } = attr
+        && path.is1("doc")
+    {
+        for atom in DocAtom::parse(tt) {
+            match atom {
+                DocAtom::Alias(aliases) => {
+                    result.extend(aliases.into_iter().map(|alias| Symbol::intern(&alias)))
+                }
+                DocAtom::KeyValue { key, value } if key == "alias" => {
+                    result.push(Symbol::intern(&value))
+                }
+                _ => {}
+            }
+        }
+    }
+    ControlFlow::Continue(())
+}
+
+fn extract_cfgs(result: &mut Vec<CfgExpr>, attr: Meta) -> ControlFlow<Infallible> {
+    if let Meta::TokenTree { path, tt } = attr
+        && path.is1("cfg")
+    {
+        result.push(CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable()));
+    }
+    ControlFlow::Continue(())
+}
+
+fn extract_docs<'a>(
+    get_cfg_options: &dyn Fn() -> &'a CfgOptions,
+    source: InFile<ast::AnyHasAttrs>,
+    outer_mod_decl: Option<InFile<ast::Module>>,
+    inner_attrs_node: Option<SyntaxNode>,
+) -> Option<Box<Docs>> {
+    let mut result = Docs {
+        docs: String::new(),
+        docs_source_map: Vec::new(),
+        outline_mod: None,
+        inline_file: source.file_id,
+        prefix_len: TextSize::new(0),
+        inline_inner_docs_start: None,
+        outline_inner_docs_start: None,
+    };
+
+    let mut cfg_options = None;
+    let mut extend_with_attrs =
+        |result: &mut Docs, node: &SyntaxNode, expect_inner_attrs, indent: &mut usize| {
+            expand_cfg_attr_with_doc_comments::<_, Infallible>(
+                AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr {
+                    Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs,
+                    Either::Right(comment) => comment.kind().doc.is_some_and(|kind| {
+                        (kind == ast::CommentPlacement::Inner) == expect_inner_attrs
+                    }),
+                }),
+                || cfg_options.get_or_insert_with(get_cfg_options),
+                |attr| {
+                    match attr {
+                        Either::Right(doc_comment) => {
+                            result.extend_with_doc_comment(doc_comment, indent)
+                        }
+                        Either::Left((attr, _, _, _)) => match attr {
+                            // FIXME: Handle macros: `#[doc = concat!("foo", "bar")]`.
+                            Meta::NamedKeyValue {
+                                name: Some(name), value: Some(value), ..
+                            } if name.text() == "doc" => {
+                                result.extend_with_doc_attr(value, indent);
+                            }
+                            _ => {}
+                        },
+                    }
+                    ControlFlow::Continue(())
+                },
+            );
+        };
+
+    if let Some(outer_mod_decl) = outer_mod_decl {
+        let mut indent = usize::MAX;
+        extend_with_attrs(&mut result, outer_mod_decl.value.syntax(), false, &mut indent);
+        result.remove_indent(indent, 0);
+        result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len()));
+    }
+
+    let inline_source_map_start = result.docs_source_map.len();
+    let mut indent = usize::MAX;
+    extend_with_attrs(&mut result, source.value.syntax(), false, &mut indent);
+    if let Some(inner_attrs_node) = &inner_attrs_node {
+        result.inline_inner_docs_start = Some(TextSize::of(&result.docs));
+        extend_with_attrs(&mut result, inner_attrs_node, true, &mut indent);
+    }
+    result.remove_indent(indent, inline_source_map_start);
+
+    result.remove_last_newline();
+
+    result.shrink_to_fit();
+
+    if result.docs.is_empty() { None } else { Some(Box::new(result)) }
+}
+
+#[salsa::tracked]
+impl AttrFlags {
+    #[salsa::tracked]
+    pub fn query(db: &dyn DefDatabase, owner: AttrDefId) -> AttrFlags {
+        let mut attr_flags = AttrFlags::empty();
+        collect_attrs(db, owner, |attr| match_attr_flags(&mut attr_flags, attr));
+        attr_flags
+    }
+
+    #[inline]
+    pub fn query_field(db: &dyn DefDatabase, field: FieldId) -> AttrFlags {
+        return field_attr_flags(db, field.parent)
+            .get(field.local_id)
+            .copied()
+            .unwrap_or_else(AttrFlags::empty);
+
+        #[salsa::tracked(returns(ref))]
+        fn field_attr_flags(
+            db: &dyn DefDatabase,
+            variant: VariantId,
+        ) -> ArenaMap<LocalFieldId, AttrFlags> {
+            collect_field_attrs(db, variant, |cfg_options, field| {
+                let mut attr_flags = AttrFlags::empty();
+                expand_cfg_attr(
+                    field.value.attrs(),
+                    || cfg_options,
+                    |attr, _, _, _| match_attr_flags(&mut attr_flags, attr),
+                );
+                attr_flags
+            })
+        }
+    }
+
+    #[inline]
+    pub fn query_generic_params(
+        db: &dyn DefDatabase,
+        def: GenericDefId,
+    ) -> &(ArenaMap<LocalLifetimeParamId, AttrFlags>, ArenaMap<LocalTypeOrConstParamId, AttrFlags>)
+    {
+        let generic_params = GenericParams::new(db, def);
+        let params_count_excluding_self =
+            generic_params.len() - usize::from(generic_params.trait_self_param().is_some());
+        if params_count_excluding_self == 0 {
+            return const { &(ArenaMap::new(), ArenaMap::new()) };
+        }
+        return generic_params_attr_flags(db, def);
+
+        #[salsa::tracked(returns(ref))]
+        fn generic_params_attr_flags(
+            db: &dyn DefDatabase,
+            def: GenericDefId,
+        ) -> (ArenaMap<LocalLifetimeParamId, AttrFlags>, ArenaMap<LocalTypeOrConstParamId, AttrFlags>)
+        {
+            let mut lifetimes = ArenaMap::new();
+            let mut type_and_consts = ArenaMap::new();
+
+            let mut cfg_options = None;
+            let mut cfg_options =
+                || *cfg_options.get_or_insert_with(|| def.krate(db).cfg_options(db));
+
+            let lifetimes_source = HasChildSource::<LocalLifetimeParamId>::child_source(&def, db);
+            for (lifetime_id, lifetime) in lifetimes_source.value.iter() {
+                let mut attr_flags = AttrFlags::empty();
+                expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _, _, _| {
+                    match_attr_flags(&mut attr_flags, attr)
+                });
+                if !attr_flags.is_empty() {
+                    lifetimes.insert(lifetime_id, attr_flags);
+                }
+            }
+
+            let type_and_consts_source =
+                HasChildSource::<LocalTypeOrConstParamId>::child_source(&def, db);
+            for (type_or_const_id, type_or_const) in type_and_consts_source.value.iter() {
+                let mut attr_flags = AttrFlags::empty();
+                expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _, _, _| {
+                    match_attr_flags(&mut attr_flags, attr)
+                });
+                if !attr_flags.is_empty() {
+                    type_and_consts.insert(type_or_const_id, attr_flags);
+                }
+            }
+
+            lifetimes.shrink_to_fit();
+            type_and_consts.shrink_to_fit();
+            (lifetimes, type_and_consts)
+        }
+    }
+
+    #[inline]
+    pub fn query_lifetime_param(db: &dyn DefDatabase, owner: LifetimeParamId) -> AttrFlags {
+        AttrFlags::query_generic_params(db, owner.parent)
+            .0
+            .get(owner.local_id)
+            .copied()
+            .unwrap_or_else(AttrFlags::empty)
+    }
+    #[inline]
+    pub fn query_type_or_const_param(db: &dyn DefDatabase, owner: TypeOrConstParamId) -> AttrFlags {
+        AttrFlags::query_generic_params(db, owner.parent)
+            .1
+            .get(owner.local_id)
+            .copied()
+            .unwrap_or_else(AttrFlags::empty)
+    }
+
+    pub(crate) fn is_cfg_enabled_for(
+        owner: &dyn HasAttrs,
+        cfg_options: &CfgOptions,
+    ) -> Result<(), CfgExpr> {
+        let attrs = ast::attrs_including_inner(owner);
+        let result = expand_cfg_attr(
+            attrs,
+            || cfg_options,
+            |attr, _, _, _| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && path.is1("cfg")
+                    && let cfg =
+                        CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable())
+                    && cfg_options.check(&cfg) == Some(false)
+                {
+                    ControlFlow::Break(cfg)
+                } else {
+                    ControlFlow::Continue(())
+                }
+            },
+        );
+        match result {
+            Some(cfg) => Err(cfg),
+            None => Ok(()),
+        }
+    }
+
+    #[inline]
+    pub fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
+        AttrFlags::query(db, owner).lang_item_with_attrs(db, owner)
+    }
+
+    #[inline]
+    pub fn lang_item_with_attrs(self, db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
+        if !self.contains(AttrFlags::LANG_ITEM) {
+            // Don't create the query if this is not a lang item; that would waste memory.
+            return None;
+        }
+
+        return lang_item(db, owner);
+
+        #[salsa::tracked]
+        fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
+            collect_attrs(db, owner, |attr| {
+                if let Meta::NamedKeyValue { name: Some(name), value: Some(value), .. } = attr
+                    && name.text() == "lang"
+                    && let Some(value) = ast::String::cast(value)
+                    && let Ok(value) = value.value()
+                {
+                    ControlFlow::Break(Symbol::intern(&value))
+                } else {
+                    ControlFlow::Continue(())
+                }
+            })
+        }
+    }
+
+    #[inline]
+    pub fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
+        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_REPR) {
+            // Don't create the query if this has no repr; that would waste memory.
+            return None;
+        }
+
+        return repr(db, owner);
+
+        #[salsa::tracked]
+        fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
+            let mut result = None;
+            collect_attrs::<Infallible>(db, owner.into(), |attr| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && path.is1("repr")
+                    && let Some(repr) = parse_repr_tt(&tt)
+                {
+                    match &mut result {
+                        Some(existing) => merge_repr(existing, repr),
+                        None => result = Some(repr),
+                    }
+                }
+                ControlFlow::Continue(())
+            });
+            result
+        }
+    }
+
+    /// Call this only if there are legacy const generics, to save memory.
+    #[salsa::tracked(returns(ref))]
+    pub(crate) fn legacy_const_generic_indices(
+        db: &dyn DefDatabase,
+        owner: FunctionId,
+    ) -> Option<Box<[u32]>> {
+        let result = collect_attrs(db, owner.into(), |attr| {
+            if let Meta::TokenTree { path, tt } = attr
+                && path.is1("rustc_legacy_const_generics")
+            {
+                let result = parse_rustc_legacy_const_generics(tt);
+                ControlFlow::Break(result)
+            } else {
+                ControlFlow::Continue(())
+            }
+        });
+        result.filter(|it| !it.is_empty())
+    }
+
+    // There aren't typically many crates, so it's okay to always make this a query without a flag.
+    #[salsa::tracked(returns(ref))]
+    pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option<SmolStr> {
+        let root_file_id = krate.root_file_id(db);
+        let syntax = db.parse(root_file_id).tree();
+
+        let mut cfg_options = None;
+        expand_cfg_attr(
+            syntax.attrs(),
+            || cfg_options.get_or_insert(krate.cfg_options(db)),
+            |attr, _, _, _| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && path.is1("doc")
+                    && let Some(result) = DocAtom::parse(tt).into_iter().find_map(|atom| {
+                        if let DocAtom::KeyValue { key, value } = atom
+                            && key == "html_root_url"
+                        {
+                            Some(value)
+                        } else {
+                            None
+                        }
+                    })
+                {
+                    ControlFlow::Break(result)
+                } else {
+                    ControlFlow::Continue(())
+                }
+            },
+        )
+    }
+
+    #[inline]
+    pub fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> &FxHashSet<Symbol> {
+        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_TARGET_FEATURE) {
+            return const { &FxHashSet::with_hasher(rustc_hash::FxBuildHasher) };
+        }
+
+        return target_features(db, owner);
+
+        #[salsa::tracked(returns(ref))]
+        fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> FxHashSet<Symbol> {
+            let mut result = FxHashSet::default();
+            collect_attrs::<Infallible>(db, owner.into(), |attr| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && path.is1("target_feature")
+                {
+                    let mut tt = TokenTreeChildren::new(&tt);
+                    while let Some(NodeOrToken::Token(enable_ident)) = tt.next()
+                        && enable_ident.text() == "enable"
+                        && let Some(NodeOrToken::Token(eq_token)) = tt.next()
+                        && eq_token.kind() == T![=]
+                        && let Some(NodeOrToken::Token(features)) = tt.next()
+                        && let Some(features) = ast::String::cast(features)
+                        && let Ok(features) = features.value()
+                    {
+                        result.extend(features.split(',').map(Symbol::intern));
+                        if tt
+                            .next()
+                            .and_then(NodeOrToken::into_token)
+                            .is_none_or(|it| it.kind() != T![,])
+                        {
+                            break;
+                        }
+                    }
+                }
+                ControlFlow::Continue(())
+            });
+            result.shrink_to_fit();
+            result
+        }
+    }
+
+    #[inline]
+    pub fn rustc_layout_scalar_valid_range(
+        db: &dyn DefDatabase,
+        owner: AdtId,
+    ) -> RustcLayoutScalarValidRange {
+        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
+        {
+            return RustcLayoutScalarValidRange::default();
+        }
+
+        return rustc_layout_scalar_valid_range(db, owner);
+
+        #[salsa::tracked]
+        fn rustc_layout_scalar_valid_range(
+            db: &dyn DefDatabase,
+            owner: AdtId,
+        ) -> RustcLayoutScalarValidRange {
+            let mut result = RustcLayoutScalarValidRange::default();
+            collect_attrs::<Infallible>(db, owner.into(), |attr| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && (path.is1("rustc_layout_scalar_valid_range_start")
+                        || path.is1("rustc_layout_scalar_valid_range_end"))
+                    && let tt = TokenTreeChildren::new(&tt)
+                    && let Ok(NodeOrToken::Token(value)) = Itertools::exactly_one(tt)
+                    && let Some(value) = ast::IntNumber::cast(value)
+                    && let Ok(value) = value.value()
+                {
+                    if path.is1("rustc_layout_scalar_valid_range_start") {
+                        result.start = Some(value)
+                    } else {
+                        result.end = Some(value);
+                    }
+                }
+                ControlFlow::Continue(())
+            });
+            result
+        }
+    }
+
+    #[inline]
+    pub fn doc_aliases(self, db: &dyn DefDatabase, owner: Either<AttrDefId, FieldId>) -> &[Symbol] {
+        if !self.contains(AttrFlags::HAS_DOC_ALIASES) {
+            return &[];
+        }
+        return match owner {
+            Either::Left(it) => doc_aliases(db, it),
+            Either::Right(field) => fields_doc_aliases(db, field.parent)
+                .get(field.local_id)
+                .map(|it| &**it)
+                .unwrap_or_default(),
+        };
+
+        #[salsa::tracked(returns(ref))]
+        fn doc_aliases(db: &dyn DefDatabase, owner: AttrDefId) -> Box<[Symbol]> {
+            let mut result = Vec::new();
+            collect_attrs::<Infallible>(db, owner, |attr| extract_doc_aliases(&mut result, attr));
+            result.into_boxed_slice()
+        }
+
+        #[salsa::tracked(returns(ref))]
+        fn fields_doc_aliases(
+            db: &dyn DefDatabase,
+            variant: VariantId,
+        ) -> ArenaMap<LocalFieldId, Box<[Symbol]>> {
+            collect_field_attrs(db, variant, |cfg_options, field| {
+                let mut result = Vec::new();
+                expand_cfg_attr(
+                    field.value.attrs(),
+                    || cfg_options,
+                    |attr, _, _, _| extract_doc_aliases(&mut result, attr),
+                );
+                result.into_boxed_slice()
+            })
+        }
+    }
+
+    #[inline]
+    pub fn cfgs(self, db: &dyn DefDatabase, owner: Either<AttrDefId, FieldId>) -> Option<&CfgExpr> {
+        if !self.contains(AttrFlags::HAS_CFG) {
+            return None;
+        }
+        return match owner {
+            Either::Left(it) => cfgs(db, it).as_ref(),
+            Either::Right(field) => {
+                fields_cfgs(db, field.parent).get(field.local_id).and_then(|it| it.as_ref())
+            }
+        };
+
+        // We LRU this query because it is only used by IDE.
+        #[salsa::tracked(returns(ref), lru = 250)]
+        fn cfgs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<CfgExpr> {
+            let mut result = Vec::new();
+            collect_attrs::<Infallible>(db, owner, |attr| extract_cfgs(&mut result, attr));
+            match result.len() {
+                0 => None,
+                1 => result.into_iter().next(),
+                _ => Some(CfgExpr::All(result.into_boxed_slice())),
+            }
+        }
+
+        // We LRU this query because it is only used by IDE.
+        #[salsa::tracked(returns(ref), lru = 50)]
+        fn fields_cfgs(
+            db: &dyn DefDatabase,
+            variant: VariantId,
+        ) -> ArenaMap<LocalFieldId, Option<CfgExpr>> {
+            collect_field_attrs(db, variant, |cfg_options, field| {
+                let mut result = Vec::new();
+                expand_cfg_attr(
+                    field.value.attrs(),
+                    || cfg_options,
+                    |attr, _, _, _| extract_cfgs(&mut result, attr),
+                );
+                match result.len() {
+                    0 => None,
+                    1 => result.into_iter().next(),
+                    _ => Some(CfgExpr::All(result.into_boxed_slice())),
+                }
+            })
+        }
+    }
+
+    #[inline]
+    pub fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option<Symbol> {
+        if !AttrFlags::query(db, AttrDefId::ModuleId(owner)).contains(AttrFlags::HAS_DOC_KEYWORD) {
+            return None;
+        }
+        return doc_keyword(db, owner);
+
+        #[salsa::tracked]
+        fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option<Symbol> {
+            collect_attrs(db, AttrDefId::ModuleId(owner), |attr| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && path.is1("doc")
+                {
+                    for atom in DocAtom::parse(tt) {
+                        if let DocAtom::KeyValue { key, value } = atom
+                            && key == "keyword"
+                        {
+                            return ControlFlow::Break(Symbol::intern(&value));
+                        }
+                    }
+                }
+                ControlFlow::Continue(())
+            })
+        }
+    }
+
+    // We LRU this query because it is only used by IDE.
+    #[salsa::tracked(returns(ref), lru = 250)]
+    pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Box<Docs>> {
+        let (source, outer_mod_decl, krate) = attrs_source(db, owner);
+        let inner_attrs_node = source.value.inner_attributes_node();
+        extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node)
+    }
+
+    #[inline]
+    pub fn field_docs(db: &dyn DefDatabase, field: FieldId) -> Option<&Docs> {
+        return fields_docs(db, field.parent).get(field.local_id).and_then(|it| it.as_deref());
+
+        // We LRU this query because it is only used by IDE.
+        #[salsa::tracked(returns(ref), lru = 50)]
+        pub fn fields_docs(
+            db: &dyn DefDatabase,
+            variant: VariantId,
+        ) -> ArenaMap<LocalFieldId, Option<Box<Docs>>> {
+            collect_field_attrs(db, variant, |cfg_options, field| {
+                extract_docs(&|| cfg_options, field, None, None)
+            })
+        }
+    }
+
+    #[inline]
+    pub fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<&DeriveInfo> {
+        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) {
+            return None;
+        }
+
+        return derive_info(db, owner).as_ref();
+
+        #[salsa::tracked(returns(ref))]
+        fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<DeriveInfo> {
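+            // Recognizes `#[proc_macro_derive(Trait)]` and
+            // `#[proc_macro_derive(Trait, attributes(helper1, helper2))]`,
+            // as well as the analogous `#[rustc_builtin_macro(...)]` forms.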
+            collect_attrs(db, owner.into(), |attr| {
+                if let Meta::TokenTree { path, tt } = attr
+                    && path.segments.len() == 1
+                    && matches!(
+                        path.segments[0].text(),
+                        "proc_macro_derive" | "rustc_builtin_macro"
+                    )
+                    && let mut tt = TokenTreeChildren::new(&tt)
+                    && let Some(NodeOrToken::Token(trait_name)) = tt.next()
+                    && trait_name.kind().is_any_identifier()
+                {
+                    let trait_name = Symbol::intern(trait_name.text());
+
+                    let helpers = if let Some(NodeOrToken::Token(comma)) = tt.next()
+                        && comma.kind() == T![,]
+                        && let Some(NodeOrToken::Token(attributes)) = tt.next()
+                        && attributes.text() == "attributes"
+                        && let Some(NodeOrToken::Node(attributes)) = tt.next()
+                    {
+                        attributes
+                            .syntax()
+                            .children_with_tokens()
+                            .filter_map(NodeOrToken::into_token)
+                            .filter(|it| it.kind().is_any_identifier())
+                            .map(|it| Symbol::intern(it.text()))
+                            .collect::<Box<[_]>>()
+                    } else {
+                        Box::new([])
+                    };
+
+                    ControlFlow::Break(DeriveInfo { trait_name, helpers })
+                } else {
+                    ControlFlow::Continue(())
+                }
+            })
+        }
+    }
+}
+
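+/// Merges `other` into `this`, combining the hints from multiple `repr` arguments or attributes
+/// (e.g. `#[repr(C, packed(2))]`).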
+fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
+    let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
+    flags.insert(other.flags);
+    *align = (*align).max(other.align);
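+    // `Option`'s ordering treats `None` as the smallest value, so handle the one-sided cases
+    // explicitly; when both sides are `Some`, the smaller (stricter) packing wins.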
+    *pack = match (*pack, other.pack) {
+        (Some(pack), None) | (None, Some(pack)) => Some(pack),
+        _ => (*pack).min(other.pack),
+    };
+    if other.int.is_some() {
+        *int = other.int;
+    }
+}
+
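+/// Parses the token tree of a `#[repr(...)]` attribute, e.g. `(C)`, `(transparent)`, `(u8)`,
+/// `(packed(2))` or `(align(8))`, accumulating all hints into a single `ReprOptions`.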
+fn parse_repr_tt(tt: &ast::TokenTree) -> Option<ReprOptions> {
+    use crate::builtin_type::{BuiltinInt, BuiltinUint};
+    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+
+    let mut tts = TokenTreeChildren::new(tt).peekable();
+
+    let mut acc = ReprOptions::default();
+    while let Some(tt) = tts.next() {
+        let NodeOrToken::Token(ident) = tt else {
+            continue;
+        };
+        if !ident.kind().is_any_identifier() {
+            continue;
+        }
+        let repr = match ident.text() {
+            "packed" => {
+                let pack = if let Some(NodeOrToken::Node(tt)) = tts.peek() {
+                    let tt = tt.clone();
+                    tts.next();
+                    let mut tt_iter = TokenTreeChildren::new(&tt);
+                    if let Some(NodeOrToken::Token(lit)) = tt_iter.next()
+                        && let Some(lit) = ast::IntNumber::cast(lit)
+                        && let Ok(lit) = lit.value()
+                        && let Ok(lit) = lit.try_into()
+                    {
+                        lit
+                    } else {
+                        0
+                    }
+                } else {
+                    0
+                };
+                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
+                ReprOptions { pack, ..Default::default() }
+            }
+            "align" => {
+                let mut align = None;
+                if let Some(NodeOrToken::Node(tt)) = tts.peek() {
+                    let tt = tt.clone();
+                    tts.next();
+                    let mut tt_iter = TokenTreeChildren::new(&tt);
+                    if let Some(NodeOrToken::Token(lit)) = tt_iter.next()
+                        && let Some(lit) = ast::IntNumber::cast(lit)
+                        && let Ok(lit) = lit.value()
+                        && let Ok(lit) = lit.try_into()
+                    {
+                        align = Align::from_bytes(lit).ok();
+                    }
+                }
+                ReprOptions { align, ..Default::default() }
+            }
+            "C" => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
+            "transparent" => ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() },
+            "simd" => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
+            repr => {
+                let mut int = None;
+                if let Some(builtin) = BuiltinInt::from_suffix(repr)
+                    .map(Either::Left)
+                    .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right))
+                {
+                    int = Some(match builtin {
+                        Either::Left(bi) => match bi {
+                            BuiltinInt::Isize => IntegerType::Pointer(true),
+                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
+                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
+                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
+                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
+                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
+                        },
+                        Either::Right(bu) => match bu {
+                            BuiltinUint::Usize => IntegerType::Pointer(false),
+                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
+                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
+                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
+                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
+                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
+                        },
+                    });
+                }
+                ReprOptions { int, ..Default::default() }
+            }
+        };
+        merge_repr(&mut acc, repr);
+    }
+
+    Some(acc)
+}
+
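+/// Parses the parameter indices out of `#[rustc_legacy_const_generics(...)]`,
+/// e.g. `(0, 2)` becomes `[0, 2]`.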
+fn parse_rustc_legacy_const_generics(tt: ast::TokenTree) -> Box<[u32]> {
+    TokenTreeChildren::new(&tt)
+        .filter_map(|param| {
+            ast::IntNumber::cast(param.into_token()?)?.value().ok()?.try_into().ok()
+        })
+        .collect()
+}
+
+#[derive(Debug)]
+enum DocAtom {
+    /// E.g. `#[doc(hidden)]`
+    Flag(SmolStr),
+    /// E.g. `#[doc(alias = "it")]`
+    ///
+    /// Note that a key can have multiple values that are all considered "active" at the same time.
+    /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
+    KeyValue { key: SmolStr, value: SmolStr },
+    /// E.g. `#[doc(alias("x", "y"))]`
+    Alias(Vec<SmolStr>),
+}
+
+impl DocAtom {
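+    /// Parses the token tree of a `#[doc(...)]` attribute into atoms; e.g. `(hidden, alias = "x")`
+    /// yields `[Flag("hidden"), KeyValue { key: "alias", value: "x" }]`.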
+    fn parse(tt: ast::TokenTree) -> SmallVec<[DocAtom; 1]> {
+        let mut iter = TokenTreeChildren::new(&tt).peekable();
+        let mut result = SmallVec::new();
+        while iter.peek().is_some() {
+            if let Some(expr) = next_doc_expr(&mut iter) {
+                result.push(expr);
+            }
+        }
+        result
+    }
+}
+
+fn next_doc_expr(it: &mut Peekable<TokenTreeChildren>) -> Option<DocAtom> {
+    let name = match it.next() {
+        Some(NodeOrToken::Token(token)) if token.kind().is_any_identifier() => {
+            SmolStr::new(token.text())
+        }
+        _ => return None,
+    };
+
+    let ret = match it.peek() {
+        Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
+            it.next();
+            if let Some(NodeOrToken::Token(value)) = it.next()
+                && let Some(value) = ast::String::cast(value)
+                && let Ok(value) = value.value()
+            {
+                DocAtom::KeyValue { key: name, value: SmolStr::new(&*value) }
+            } else {
+                return None;
+            }
+        }
+        Some(NodeOrToken::Node(subtree)) => {
+            if name != "alias" {
+                return None;
+            }
+            let aliases = TokenTreeChildren::new(subtree)
+                .filter_map(|alias| {
+                    Some(SmolStr::new(&*ast::String::cast(alias.into_token()?)?.value().ok()?))
+                })
+                .collect();
+            it.next();
+            DocAtom::Alias(aliases)
+        }
+        _ => DocAtom::Flag(name),
+    };
+    Some(ret)
+}
+
+#[cfg(test)]
+mod tests {
+    use expect_test::expect;
+    use hir_expand::InFile;
+    use test_fixture::WithFixture;
+    use tt::{TextRange, TextSize};
+
+    use crate::attrs::IsInnerDoc;
+    use crate::{attrs::Docs, test_db::TestDB};
+
+    #[test]
+    fn docs() {
+        let (_db, file_id) = TestDB::with_single_file("");
+        let mut docs = Docs {
+            docs: String::new(),
+            docs_source_map: Vec::new(),
+            outline_mod: None,
+            inline_file: file_id.into(),
+            prefix_len: TextSize::new(0),
+            inline_inner_docs_start: None,
+            outline_inner_docs_start: None,
+        };
+        let mut indent = usize::MAX;
+
+        let outer = " foo\n\tbar  baz";
+        let mut ast_offset = TextSize::new(123);
+        for line in outer.split('\n') {
+            docs.extend_with_doc_str(line, ast_offset, &mut indent);
+            ast_offset += TextSize::of(line) + TextSize::of("\n");
+        }
+
+        docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs));
+        ast_offset += TextSize::new(123);
+        let inner = " bar \n baz";
+        for line in inner.split('\n') {
+            docs.extend_with_doc_str(line, ast_offset, &mut indent);
+            ast_offset += TextSize::of(line) + TextSize::of("\n");
+        }
+
+        assert_eq!(indent, 1);
+        expect![[r#"
+            [
+                DocsSourceMapLine {
+                    string_offset: 0,
+                    ast_offset: 123,
+                },
+                DocsSourceMapLine {
+                    string_offset: 5,
+                    ast_offset: 128,
+                },
+                DocsSourceMapLine {
+                    string_offset: 15,
+                    ast_offset: 261,
+                },
+                DocsSourceMapLine {
+                    string_offset: 20,
+                    ast_offset: 267,
+                },
+            ]
+        "#]]
+        .assert_debug_eq(&docs.docs_source_map);
+
+        docs.remove_indent(indent, 0);
+
+        assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13)));
+
+        assert_eq!(docs.docs, "foo\nbar  baz\nbar\nbaz\n");
+        expect![[r#"
+            [
+                DocsSourceMapLine {
+                    string_offset: 0,
+                    ast_offset: 124,
+                },
+                DocsSourceMapLine {
+                    string_offset: 4,
+                    ast_offset: 129,
+                },
+                DocsSourceMapLine {
+                    string_offset: 13,
+                    ast_offset: 262,
+                },
+                DocsSourceMapLine {
+                    string_offset: 17,
+                    ast_offset: 268,
+                },
+            ]
+        "#]]
+        .assert_debug_eq(&docs.docs_source_map);
+
+        docs.append(&docs.clone());
+        docs.prepend_str("prefix---");
+        assert_eq!(docs.docs, "prefix---foo\nbar  baz\nbar\nbaz\nfoo\nbar  baz\nbar\nbaz\n");
+        expect![[r#"
+            [
+                DocsSourceMapLine {
+                    string_offset: 0,
+                    ast_offset: 124,
+                },
+                DocsSourceMapLine {
+                    string_offset: 4,
+                    ast_offset: 129,
+                },
+                DocsSourceMapLine {
+                    string_offset: 13,
+                    ast_offset: 262,
+                },
+                DocsSourceMapLine {
+                    string_offset: 17,
+                    ast_offset: 268,
+                },
+                DocsSourceMapLine {
+                    string_offset: 21,
+                    ast_offset: 124,
+                },
+                DocsSourceMapLine {
+                    string_offset: 25,
+                    ast_offset: 129,
+                },
+                DocsSourceMapLine {
+                    string_offset: 34,
+                    ast_offset: 262,
+                },
+                DocsSourceMapLine {
+                    string_offset: 38,
+                    ast_offset: 268,
+                },
+            ]
+        "#]]
+        .assert_debug_eq(&docs.docs_source_map);
+
+        let range = |start, end| TextRange::new(TextSize::new(start), TextSize::new(end));
+        let in_file = |range| InFile::new(file_id.into(), range);
+        assert_eq!(docs.find_ast_range(range(0, 2)), None);
+        assert_eq!(docs.find_ast_range(range(8, 10)), None);
+        assert_eq!(
+            docs.find_ast_range(range(9, 10)),
+            Some((in_file(range(124, 125)), IsInnerDoc::No))
+        );
+        assert_eq!(docs.find_ast_range(range(20, 23)), None);
+        assert_eq!(
+            docs.find_ast_range(range(23, 25)),
+            Some((in_file(range(263, 265)), IsInnerDoc::Yes))
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index 49aafb2..cc311a4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -1,36 +1,32 @@
 //! Defines database & queries for name resolution.
 use base_db::{Crate, RootQueryDb, SourceDatabase};
-use either::Either;
 use hir_expand::{
     EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
     db::ExpandDatabase,
 };
-use intern::sym;
 use la_arena::ArenaMap;
-use syntax::{AstPtr, ast};
 use triomphe::Arc;
 
 use crate::{
-    AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
-    EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
-    FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
-    MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
-    ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId,
-    TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
-    attr::{Attrs, AttrsWithOwner},
+    AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId,
+    EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId,
+    ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc,
+    InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
+    MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
+    StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc,
+    VariantId,
+    attrs::AttrFlags,
     expr_store::{
         Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
     },
     hir::generics::GenericParams,
     import_map::ImportMap,
     item_tree::{ItemTree, file_item_tree_query},
-    lang_item::{self, LangItem},
     nameres::crate_def_map,
     signatures::{
         ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
         StructSignature, TraitSignature, TypeAliasSignature, UnionSignature,
     },
-    tt,
     visibility::{self, Visibility},
 };
 
@@ -238,28 +234,6 @@ fn generic_params_and_store_and_source_map(
         def: GenericDefId,
     ) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>);
 
-    // region:attrs
-
-    #[salsa::invoke(Attrs::fields_attrs_query)]
-    fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
-
-    // should this really be a query?
-    #[salsa::invoke(crate::attr::fields_attrs_source_map)]
-    fn fields_attrs_source_map(
-        &self,
-        def: VariantId,
-    ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
-
-    // FIXME: Make this a non-interned query.
-    #[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
-    fn attrs(&self, def: AttrDefId) -> Attrs;
-
-    #[salsa::transparent]
-    #[salsa::invoke(lang_item::lang_attr)]
-    fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
-
-    // endregion:attrs
-
     #[salsa::invoke(ImportMap::import_map_query)]
     fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
 
@@ -302,36 +276,9 @@ fn include_macro_invoc(
 }
 
 fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
-    let file = crate_id.data(db).root_file_id(db);
-    let item_tree = db.file_item_tree(file.into());
-    let attrs = item_tree.top_level_raw_attrs();
-    for attr in &**attrs {
-        match attr.path().as_ident() {
-            Some(ident) if *ident == sym::no_std => return true,
-            Some(ident) if *ident == sym::cfg_attr => {}
-            _ => continue,
-        }
-
-        // This is a `cfg_attr`; check if it could possibly expand to `no_std`.
-        // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
-        let tt = match attr.token_tree_value() {
-            Some(tt) => tt.token_trees(),
-            None => continue,
-        };
-
-        let segments =
-            tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
-        for output in segments.skip(1) {
-            match output.flat_tokens() {
-                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
-                    return true;
-                }
-                _ => {}
-            }
-        }
-    }
-
-    false
+    let root_module = CrateRootModuleId::from(crate_id).module(db);
+    let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module)));
+    attrs.contains(AttrFlags::IS_NO_STD)
 }
 
 fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
index 23b9712..6a2f06b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
@@ -17,11 +17,10 @@
 use triomphe::Arc;
 use tt::TextRange;
 
-use crate::attr::Attrs;
-use crate::expr_store::HygieneId;
-use crate::macro_call_as_call_id;
-use crate::nameres::DefMap;
-use crate::{MacroId, UnresolvedMacro, db::DefDatabase};
+use crate::{
+    MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId,
+    macro_call_as_call_id, nameres::DefMap,
+};
 
 #[derive(Debug)]
 pub(super) struct Expander {
@@ -70,11 +69,10 @@ pub(super) fn hygiene_for_range(&self, db: &dyn DefDatabase, range: TextRange) -
 
     pub(super) fn is_cfg_enabled(
         &self,
-        db: &dyn DefDatabase,
-        has_attrs: &dyn HasAttrs,
+        owner: &dyn HasAttrs,
         cfg_options: &CfgOptions,
     ) -> Result<(), cfg::CfgExpr> {
-        Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
+        AttrFlags::is_cfg_enabled_for(owner, cfg_options)
     }
 
     pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
index e3bfc5b..26a50b5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
@@ -5,14 +5,13 @@
 mod generics;
 mod path;
 
-use std::mem;
+use std::{cell::OnceCell, mem};
 
 use base_db::FxIndexSet;
 use cfg::CfgOptions;
 use either::Either;
 use hir_expand::{
     HirFileId, InFile, MacroDefId,
-    mod_path::tool_path,
     name::{AsName, Name},
     span_map::SpanMapRef,
 };
@@ -34,6 +33,7 @@
 use crate::{
     AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
     ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
+    attrs::AttrFlags,
     builtin_type::BuiltinUint,
     db::DefDatabase,
     expr_store::{
@@ -57,7 +57,7 @@
     },
     item_scope::BuiltinShadowMode,
     item_tree::FieldsShape,
-    lang_item::LangItem,
+    lang_item::{LangItemTarget, LangItems},
     nameres::{DefMap, LocalDefMap, MacroSubNs, block_def_map},
     type_ref::{
         ArrayType, ConstRef, FnType, LifetimeRef, LifetimeRefId, Mutability, PathId, Rawness,
@@ -87,14 +87,16 @@ pub(super) fn lower_body(
     let mut params = vec![];
     let mut collector = ExprCollector::new(db, module, current_file_id);
 
-    let skip_body = match owner {
-        DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
-        DefWithBodyId::StaticId(it) => db.attrs(it.into()),
-        DefWithBodyId::ConstId(it) => db.attrs(it.into()),
-        DefWithBodyId::VariantId(it) => db.attrs(it.into()),
-    }
-    .rust_analyzer_tool()
-    .any(|attr| *attr.path() == tool_path![skip]);
+    let skip_body = AttrFlags::query(
+        db,
+        match owner {
+            DefWithBodyId::FunctionId(it) => it.into(),
+            DefWithBodyId::StaticId(it) => it.into(),
+            DefWithBodyId::ConstId(it) => it.into(),
+            DefWithBodyId::VariantId(it) => it.into(),
+        },
+    )
+    .contains(AttrFlags::RUST_ANALYZER_SKIP);
     // If #[rust_analyzer::skip] annotated, only construct enough information for the signature
     // and skip the body.
     if skip_body {
@@ -416,6 +418,7 @@ pub struct ExprCollector<'db> {
     def_map: &'db DefMap,
     local_def_map: &'db LocalDefMap,
     module: ModuleId,
+    lang_items: OnceCell<&'db LangItems>,
     pub store: ExpressionStoreBuilder,
 
     // state stuff
@@ -513,7 +516,7 @@ fn check_is_used(&mut self, ec: &mut ExprCollector<'_>, id: BindingId) {
     }
 }
 
-impl ExprCollector<'_> {
+impl<'db> ExprCollector<'db> {
     pub fn new(
         db: &dyn DefDatabase,
         module: ModuleId,
@@ -527,6 +530,7 @@ pub fn new(
             module,
             def_map,
             local_def_map,
+            lang_items: OnceCell::new(),
             store: ExpressionStoreBuilder::default(),
             expander,
             current_try_block_label: None,
@@ -540,6 +544,11 @@ pub fn new(
     }
 
     #[inline]
+    pub(crate) fn lang_items(&self) -> &'db LangItems {
+        self.lang_items.get_or_init(|| crate::lang_item::lang_items(self.db, self.module.krate))
+    }
+
+    #[inline]
     pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
         self.expander.span_map()
     }
@@ -1654,7 +1663,7 @@ fn collect_as_a_binding_owner_bad(
     /// `try { <stmts>; }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(()) }`
     /// and save the `<new_label>` to use it as a break target for desugaring of the `?` operator.
     fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
-        let try_from_output = self.lang_path(LangItem::TryTraitFromOutput);
+        let try_from_output = self.lang_path(self.lang_items().TryTraitFromOutput);
         let label = self.alloc_label_desugared(Label {
             name: Name::generate_new_name(self.store.labels.len()),
         });
@@ -1753,10 +1762,11 @@ fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExp
     /// }
     /// ```
     fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -> ExprId {
-        let into_iter_fn = self.lang_path(LangItem::IntoIterIntoIter);
-        let iter_next_fn = self.lang_path(LangItem::IteratorNext);
-        let option_some = self.lang_path(LangItem::OptionSome);
-        let option_none = self.lang_path(LangItem::OptionNone);
+        let lang_items = self.lang_items();
+        let into_iter_fn = self.lang_path(lang_items.IntoIterIntoIter);
+        let iter_next_fn = self.lang_path(lang_items.IteratorNext);
+        let option_some = self.lang_path(lang_items.OptionSome);
+        let option_none = self.lang_path(lang_items.OptionNone);
         let head = self.collect_expr_opt(e.iterable());
         let into_iter_fn_expr =
             self.alloc_expr(into_iter_fn.map_or(Expr::Missing, Expr::Path), syntax_ptr);
@@ -1836,10 +1846,11 @@ fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -
     /// }
     /// ```
     fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExpr) -> ExprId {
-        let try_branch = self.lang_path(LangItem::TryTraitBranch);
-        let cf_continue = self.lang_path(LangItem::ControlFlowContinue);
-        let cf_break = self.lang_path(LangItem::ControlFlowBreak);
-        let try_from_residual = self.lang_path(LangItem::TryTraitFromResidual);
+        let lang_items = self.lang_items();
+        let try_branch = self.lang_path(lang_items.TryTraitBranch);
+        let cf_continue = self.lang_path(lang_items.ControlFlowContinue);
+        let cf_break = self.lang_path(lang_items.ControlFlowBreak);
+        let try_from_residual = self.lang_path(lang_items.TryTraitFromResidual);
         let operand = self.collect_expr_opt(e.expr());
         let try_branch = self.alloc_expr(try_branch.map_or(Expr::Missing, Expr::Path), syntax_ptr);
         let expr = self
@@ -2489,7 +2500,7 @@ fn collect_pat_possibly_rest(
     /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
     /// not.
     fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
-        let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
+        let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options);
         match enabled {
             Ok(()) => true,
             Err(cfg) => {
@@ -2773,11 +2784,10 @@ fn collect_format_args(
 
         // Assume that rustc version >= 1.89.0 iff lang item `format_arguments` exists
         // but `format_unsafe_arg` does not
-        let fmt_args =
-            || crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatArguments);
-        let fmt_unsafe_arg =
-            || crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatUnsafeArg);
-        let use_format_args_since_1_89_0 = fmt_args().is_some() && fmt_unsafe_arg().is_none();
+        let lang_items = self.lang_items();
+        let fmt_args = lang_items.FormatArguments;
+        let fmt_unsafe_arg = lang_items.FormatUnsafeArg;
+        let use_format_args_since_1_89_0 = fmt_args.is_some() && fmt_unsafe_arg.is_none();
 
         let idx = if use_format_args_since_1_89_0 {
             self.collect_format_args_impl(syntax_ptr, fmt, argmap, lit_pieces, format_options)
@@ -2856,16 +2866,13 @@ fn collect_format_args_before_1_89_0_impl(
         //         unsafe { ::core::fmt::UnsafeArg::new() }
         //     )
 
-        let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
-            self.db,
-            self.module.krate(),
+        let lang_items = self.lang_items();
+        let new_v1_formatted = self.ty_rel_lang_path(
+            lang_items.FormatArguments,
             Name::new_symbol_root(sym::new_v1_formatted),
         );
-        let unsafe_arg_new = LangItem::FormatUnsafeArg.ty_rel_path(
-            self.db,
-            self.module.krate(),
-            Name::new_symbol_root(sym::new),
-        );
+        let unsafe_arg_new =
+            self.ty_rel_lang_path(lang_items.FormatUnsafeArg, Name::new_symbol_root(sym::new));
         let new_v1_formatted =
             self.alloc_expr_desugared(new_v1_formatted.map_or(Expr::Missing, Expr::Path));
 
@@ -3044,9 +3051,8 @@ fn collect_format_args_impl(
             //         )
             //     }
 
-            let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
-                self.db,
-                self.module.krate(),
+            let new_v1_formatted = self.ty_rel_lang_path(
+                self.lang_items().FormatArguments,
                 Name::new_symbol_root(sym::new_v1_formatted),
             );
             let new_v1_formatted =
@@ -3099,6 +3105,7 @@ fn make_format_spec(
         placeholder: &FormatPlaceholder,
         argmap: &mut FxIndexSet<(usize, ArgumentType)>,
     ) -> ExprId {
+        let lang_items = self.lang_items();
         let position = match placeholder.argument.index {
             Ok(arg_index) => {
                 let (i, _) =
@@ -3159,15 +3166,14 @@ fn make_format_spec(
             let width =
                 RecordLitField { name: Name::new_symbol_root(sym::width), expr: width_expr };
             self.alloc_expr_desugared(Expr::RecordLit {
-                path: LangItem::FormatPlaceholder.path(self.db, self.module.krate()).map(Box::new),
+                path: self.lang_path(lang_items.FormatPlaceholder).map(Box::new),
                 fields: Box::new([position, flags, precision, width]),
                 spread: None,
             })
         } else {
             let format_placeholder_new = {
-                let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path(
-                    self.db,
-                    self.module.krate(),
+                let format_placeholder_new = self.ty_rel_lang_path(
+                    lang_items.FormatPlaceholder,
                     Name::new_symbol_root(sym::new),
                 );
                 match format_placeholder_new {
@@ -3188,9 +3194,8 @@ fn make_format_spec(
             )));
             let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
             let align = {
-                let align = LangItem::FormatAlignment.ty_rel_path(
-                    self.db,
-                    self.module.krate(),
+                let align = self.ty_rel_lang_path(
+                    lang_items.FormatAlignment,
                     match alignment {
                         Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left),
                         Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right),
@@ -3234,6 +3239,7 @@ fn make_count(
         count: &Option<FormatCount>,
         argmap: &mut FxIndexSet<(usize, ArgumentType)>,
     ) -> ExprId {
+        let lang_items = self.lang_items();
         match count {
             Some(FormatCount::Literal(n)) => {
                 let args = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
@@ -3241,11 +3247,9 @@ fn make_count(
                     // FIXME: Change this to Some(BuiltinUint::U16) once we drop support for toolchains < 1.88
                     None,
                 )));
-                let count_is = match LangItem::FormatCount.ty_rel_path(
-                    self.db,
-                    self.module.krate(),
-                    Name::new_symbol_root(sym::Is),
-                ) {
+                let count_is = match self
+                    .ty_rel_lang_path(lang_items.FormatCount, Name::new_symbol_root(sym::Is))
+                {
                     Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)),
                     None => self.missing_expr(),
                 };
@@ -3259,11 +3263,9 @@ fn make_count(
                         i as u128,
                         Some(BuiltinUint::Usize),
                     )));
-                    let count_param = match LangItem::FormatCount.ty_rel_path(
-                        self.db,
-                        self.module.krate(),
-                        Name::new_symbol_root(sym::Param),
-                    ) {
+                    let count_param = match self
+                        .ty_rel_lang_path(lang_items.FormatCount, Name::new_symbol_root(sym::Param))
+                    {
                         Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
                         None => self.missing_expr(),
                     };
@@ -3277,11 +3279,9 @@ fn make_count(
                     self.missing_expr()
                 }
             }
-            None => match LangItem::FormatCount.ty_rel_path(
-                self.db,
-                self.module.krate(),
-                Name::new_symbol_root(sym::Implied),
-            ) {
+            None => match self
+                .ty_rel_lang_path(lang_items.FormatCount, Name::new_symbol_root(sym::Implied))
+            {
                 Some(count_param) => self.alloc_expr_desugared(Expr::Path(count_param)),
                 None => self.missing_expr(),
             },
@@ -3299,9 +3299,8 @@ fn make_argument(&mut self, arg: ExprId, ty: ArgumentType) -> ExprId {
         use ArgumentType::*;
         use FormatTrait::*;
 
-        let new_fn = match LangItem::FormatArgument.ty_rel_path(
-            self.db,
-            self.module.krate(),
+        let new_fn = match self.ty_rel_lang_path(
+            self.lang_items().FormatArgument,
             Name::new_symbol_root(match ty {
                 Format(Display) => sym::new_display,
                 Format(Debug) => sym::new_debug,
@@ -3323,8 +3322,16 @@ fn make_argument(&mut self, arg: ExprId, ty: ArgumentType) -> ExprId {
 
     // endregion: format
 
-    fn lang_path(&self, lang: LangItem) -> Option<Path> {
-        lang.path(self.db, self.module.krate())
+    fn lang_path(&self, lang: Option<impl Into<LangItemTarget>>) -> Option<Path> {
+        Some(Path::LangItem(lang?.into(), None))
+    }
+
+    fn ty_rel_lang_path(
+        &self,
+        lang: Option<impl Into<LangItemTarget>>,
+        relative_name: Name,
+    ) -> Option<Path> {
+        Some(Path::LangItem(lang?.into(), Some(relative_name)))
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
index 4ba7093..f5ef8e1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
@@ -12,7 +12,8 @@
 use syntax::ast::{HasName, RangeOp};
 
 use crate::{
-    AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
+    AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId,
+    attrs::AttrFlags,
     expr_store::path::{GenericArg, GenericArgs},
     hir::{
         Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
@@ -167,7 +168,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
         GenericDefId::AdtId(id) => match id {
             AdtId::StructId(id) => {
                 let signature = db.struct_signature(id);
-                print_struct(db, &signature, edition)
+                print_struct(db, id, &signature, edition)
             }
             AdtId::UnionId(id) => {
                 format!("unimplemented {id:?}")
@@ -179,7 +180,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
         GenericDefId::ConstId(id) => format!("unimplemented {id:?}"),
         GenericDefId::FunctionId(id) => {
             let signature = db.function_signature(id);
-            print_function(db, &signature, edition)
+            print_function(db, id, &signature, edition)
         }
         GenericDefId::ImplId(id) => format!("unimplemented {id:?}"),
         GenericDefId::StaticId(id) => format!("unimplemented {id:?}"),
@@ -208,7 +209,8 @@ pub fn print_path(
 
 pub fn print_struct(
     db: &dyn DefDatabase,
-    StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature,
+    id: StructId,
+    StructSignature { name, generic_params, store, flags, shape }: &StructSignature,
     edition: Edition,
 ) -> String {
     let mut p = Printer {
@@ -219,7 +221,7 @@ pub fn print_struct(
         line_format: LineFormat::Newline,
         edition,
     };
-    if let Some(repr) = repr {
+    if let Some(repr) = AttrFlags::repr(db, id.into()) {
         if repr.c() {
             wln!(p, "#[repr(C)]");
         }
@@ -255,7 +257,8 @@ pub fn print_struct(
 
 pub fn print_function(
     db: &dyn DefDatabase,
-    FunctionSignature {
+    id: FunctionId,
+    signature @ FunctionSignature {
         name,
         generic_params,
         store,
@@ -263,10 +266,10 @@ pub fn print_function(
         ret_type,
         abi,
         flags,
-        legacy_const_generics_indices,
     }: &FunctionSignature,
     edition: Edition,
 ) -> String {
+    let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id);
     let mut p = Printer {
         db,
         store,
@@ -298,7 +301,7 @@ pub fn print_function(
         if i != 0 {
             w!(p, ", ");
         }
-        if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) {
+        if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) {
             w!(p, "const: ");
         }
         p.print_type_ref(*param);
@@ -1091,15 +1094,15 @@ macro_rules! write_name {
                 }};
             }
             match *it {
-                LangItemTarget::ImplDef(it) => w!(self, "{it:?}"),
+                LangItemTarget::ImplId(it) => w!(self, "{it:?}"),
                 LangItemTarget::EnumId(it) => write_name!(it),
-                LangItemTarget::Function(it) => write_name!(it),
-                LangItemTarget::Static(it) => write_name!(it),
-                LangItemTarget::Struct(it) => write_name!(it),
-                LangItemTarget::Union(it) => write_name!(it),
-                LangItemTarget::TypeAlias(it) => write_name!(it),
-                LangItemTarget::Trait(it) => write_name!(it),
-                LangItemTarget::EnumVariant(it) => write_name!(it),
+                LangItemTarget::FunctionId(it) => write_name!(it),
+                LangItemTarget::StaticId(it) => write_name!(it),
+                LangItemTarget::StructId(it) => write_name!(it),
+                LangItemTarget::UnionId(it) => write_name!(it),
+                LangItemTarget::TypeAliasId(it) => write_name!(it),
+                LangItemTarget::TraitId(it) => write_name!(it),
+                LangItemTarget::EnumVariantId(it) => write_name!(it),
             }
 
             if let Some(s) = s {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
index e8334cd..4501ff4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@ fn f() {
 }
     "#,
         expect![[r#"
-            BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
-            BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+            BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
+            BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
             crate scope
         "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
index b68674c..2dac4e7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
@@ -38,14 +38,24 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe
         match def {
             GenericDefId::AdtId(adt_id) => match adt_id {
                 crate::AdtId::StructId(struct_id) => {
-                    out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT);
+                    out += &print_struct(
+                        &db,
+                        struct_id,
+                        &db.struct_signature(struct_id),
+                        Edition::CURRENT,
+                    );
                 }
                 crate::AdtId::UnionId(_id) => (),
                 crate::AdtId::EnumId(_id) => (),
             },
             GenericDefId::ConstId(_id) => (),
             GenericDefId::FunctionId(function_id) => {
-                out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT)
+                out += &print_function(
+                    &db,
+                    function_id,
+                    &db.function_signature(function_id),
+                    Edition::CURRENT,
+                )
             }
 
             GenericDefId::ImplId(_id) => (),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
index 66eade2..53be0de 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
@@ -45,7 +45,7 @@
 
 // FIXME: Encode this as a single u32, we won't ever reach all 32 bits especially given these counts
 // are local to the body.
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, salsa::Update)]
 pub enum ExprOrPatId {
     ExprId(ExprId),
     PatId(PatId),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index f31f355..67cf466 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -13,7 +13,8 @@
 use triomphe::Arc;
 
 use crate::{
-    AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
+    AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId,
+    attrs::AttrFlags,
     db::DefDatabase,
     item_scope::{ImportOrExternCrate, ItemInNs},
     nameres::{DefMap, assoc::TraitItems, crate_def_map},
@@ -165,17 +166,34 @@ fn collect_import_map(db: &dyn DefDatabase, krate: Crate) -> ImportMapIndex {
                         }
                     } else {
                         match item {
-                            ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
+                            ItemInNs::Types(id) | ItemInNs::Values(id) => match id {
+                                ModuleDefId::ModuleId(it) => {
+                                    Some(AttrDefId::ModuleId(InternedModuleId::new(db, it)))
+                                }
+                                ModuleDefId::FunctionId(it) => Some(it.into()),
+                                ModuleDefId::AdtId(it) => Some(it.into()),
+                                ModuleDefId::EnumVariantId(it) => Some(it.into()),
+                                ModuleDefId::ConstId(it) => Some(it.into()),
+                                ModuleDefId::StaticId(it) => Some(it.into()),
+                                ModuleDefId::TraitId(it) => Some(it.into()),
+                                ModuleDefId::TypeAliasId(it) => Some(it.into()),
+                                ModuleDefId::MacroId(it) => Some(it.into()),
+                                ModuleDefId::BuiltinType(_) => None,
+                            },
                             ItemInNs::Macros(id) => Some(id.into()),
                         }
                     };
                     let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
                         None => (false, false, Complete::Yes),
                         Some(attr_id) => {
-                            let attrs = db.attrs(attr_id);
+                            let attrs = AttrFlags::query(db, attr_id);
                             let do_not_complete =
-                                Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
-                            (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
+                                Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs);
+                            (
+                                attrs.contains(AttrFlags::IS_DOC_HIDDEN),
+                                attrs.contains(AttrFlags::IS_UNSTABLE),
+                                do_not_complete,
+                            )
                         }
                     };
 
@@ -239,15 +257,15 @@ fn collect_trait_assoc_items(
             };
 
             let attr_id = item.into();
-            let attrs = &db.attrs(attr_id);
+            let attrs = AttrFlags::query(db, attr_id);
             let item_do_not_complete = Complete::extract(false, attrs);
             let do_not_complete =
                 Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
             let assoc_item_info = ImportInfo {
                 container: trait_import_info.container,
                 name: assoc_item_name.clone(),
-                is_doc_hidden: attrs.has_doc_hidden(),
-                is_unstable: attrs.is_unstable(),
+                is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN),
+                is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE),
                 complete: do_not_complete,
             };
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index f35df8d..2a104ff 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -30,6 +30,7 @@
 //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
 //! surface syntax.
 
+mod attrs;
 mod lower;
 mod pretty;
 #[cfg(test)]
@@ -43,10 +44,8 @@
 };
 
 use ast::{AstNode, StructKind};
-use base_db::Crate;
 use hir_expand::{
     ExpandTo, HirFileId,
-    attrs::RawAttrs,
     mod_path::{ModPath, PathKind},
     name::Name,
 };
@@ -59,9 +58,12 @@
 use thin_vec::ThinVec;
 use triomphe::Arc;
 
-use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
+use crate::{BlockId, Lookup, db::DefDatabase};
 
-pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast};
+pub(crate) use crate::item_tree::{
+    attrs::*,
+    lower::{lower_use_tree, visibility_from_ast},
+};
 
 #[derive(Copy, Clone, Eq, PartialEq)]
 pub(crate) struct RawVisibilityId(u32);
@@ -96,7 +98,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
     let mut item_tree = match_ast! {
         match syntax {
             ast::SourceFile(file) => {
-                let top_attrs = RawAttrs::new(db, &file, ctx.span_map());
+                let top_attrs = ctx.lower_attrs(&file);
                 let mut item_tree = ctx.lower_module_items(&file);
                 item_tree.top_attrs = top_attrs;
                 item_tree
@@ -132,7 +134,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
                     attrs: FxHashMap::default(),
                     small_data: FxHashMap::default(),
                     big_data: FxHashMap::default(),
-                    top_attrs: RawAttrs::EMPTY,
+                    top_attrs: AttrsOrCfg::empty(),
                     vis: ItemVisibilities { arena: ThinVec::new() },
                 })
             })
@@ -168,7 +170,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
                     attrs: FxHashMap::default(),
                     small_data: FxHashMap::default(),
                     big_data: FxHashMap::default(),
-                    top_attrs: RawAttrs::EMPTY,
+                    top_attrs: AttrsOrCfg::empty(),
                     vis: ItemVisibilities { arena: ThinVec::new() },
                 })
             })
@@ -182,8 +184,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
 #[derive(Debug, Default, Eq, PartialEq)]
 pub struct ItemTree {
     top_level: Box<[ModItemId]>,
-    top_attrs: RawAttrs,
-    attrs: FxHashMap<FileAstId<ast::Item>, RawAttrs>,
+    top_attrs: AttrsOrCfg,
+    attrs: FxHashMap<FileAstId<ast::Item>, AttrsOrCfg>,
     vis: ItemVisibilities,
     big_data: FxHashMap<FileAstId<ast::Item>, BigModItem>,
     small_data: FxHashMap<FileAstId<ast::Item>, SmallModItem>,
@@ -197,26 +199,12 @@ pub(crate) fn top_level_items(&self) -> &[ModItemId] {
     }
 
     /// Returns the inner attributes of the source file.
-    pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs {
+    pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg {
         &self.top_attrs
     }
 
-    /// Returns the inner attributes of the source file.
-    pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
-        Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone())
-    }
-
-    pub(crate) fn raw_attrs(&self, of: FileAstId<ast::Item>) -> &RawAttrs {
-        self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
-    }
-
-    pub(crate) fn attrs(
-        &self,
-        db: &dyn DefDatabase,
-        krate: Crate,
-        of: FileAstId<ast::Item>,
-    ) -> Attrs {
-        Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
+    pub(crate) fn attrs(&self, of: FileAstId<ast::Item>) -> Option<&AttrsOrCfg> {
+        self.attrs.get(&of)
     }
 
     /// Returns a count of a few, expensive items.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs
new file mode 100644
index 0000000..5c635a4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs
@@ -0,0 +1,220 @@
+//! Defines attribute helpers for name resolution.
+//!
+//! Note that we don't preserve all attributes for name resolution, in order to save space:
+//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes)
+//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`].
+
+use std::{
+    borrow::Cow,
+    convert::Infallible,
+    ops::{self, ControlFlow},
+};
+
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+    attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
+    mod_path::ModPath,
+    name::Name,
+    span_map::SpanMapRef,
+};
+use intern::{Interned, Symbol, sym};
+use syntax::{AstNode, T, ast};
+use syntax_bridge::DocCommentDesugarMode;
+use tt::token_to_literal;
+
+use crate::{db::DefDatabase, item_tree::lower::Ctx};
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) enum AttrsOrCfg {
+    Enabled {
+        attrs: AttrsOwned,
+    },
+    /// This only collects the attributes up to the disabled `cfg` (which is all that is needed for crate-level attributes).
+    CfgDisabled(Box<(CfgExpr, AttrsOwned)>),
+}
+
+impl Default for AttrsOrCfg {
+    #[inline]
+    fn default() -> Self {
+        AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) }
+    }
+}
+
+impl AttrsOrCfg {
+    pub(crate) fn lower<'a>(
+        db: &dyn DefDatabase,
+        owner: &dyn ast::HasAttrs,
+        cfg_options: &dyn Fn() -> &'a CfgOptions,
+        span_map: SpanMapRef<'_>,
+    ) -> AttrsOrCfg {
+        let mut attrs = Vec::new();
+        let result =
+            collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, container, _, _| {
+                // NOTE: We cannot return early from this function; *every* attribute must be pushed,
+                // otherwise we'll mess up the `AttrId` tracking.
+                let (span, path_range, input) = match meta {
+                    Meta::NamedKeyValue { path_range, name: _, value } => {
+                        let span = span_map.span_for_range(path_range);
+                        let input = value.map(|value| {
+                            Box::new(AttrInput::Literal(token_to_literal(
+                                value.text(),
+                                span_map.span_for_range(value.text_range()),
+                            )))
+                        });
+                        (span, path_range, input)
+                    }
+                    Meta::TokenTree { path, tt } => {
+                        let span = span_map.span_for_range(path.range);
+                        let tt = syntax_bridge::syntax_node_to_token_tree(
+                            tt.syntax(),
+                            span_map,
+                            span,
+                            DocCommentDesugarMode::ProcMacro,
+                        );
+                        let input = Some(Box::new(AttrInput::TokenTree(tt)));
+                        (span, path.range, input)
+                    }
+                    Meta::Path { path } => {
+                        let span = span_map.span_for_range(path.range);
+                        (span, path.range, None)
+                    }
+                };
+
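+                // Rebuild the attribute path (e.g. `rust_analyzer::skip`) from the raw tokens
+                // covering `path_range`.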
+                let path = container.token_at_offset(path_range.start()).right_biased().and_then(
+                    |first_path_token| {
+                        let is_abs = matches!(first_path_token.kind(), T![:] | T![::]);
+                        let segments =
+                            std::iter::successors(Some(first_path_token), |it| it.next_token())
+                                .take_while(|it| it.text_range().end() <= path_range.end())
+                                .filter(|it| it.kind().is_any_identifier());
+                        ModPath::from_tokens(
+                            db,
+                            &mut |range| span_map.span_for_range(range).ctx,
+                            is_abs,
+                            segments,
+                        )
+                    },
+                );
+                let path = path.unwrap_or_else(|| Name::missing().into());
+
+                attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
+                ControlFlow::Continue(())
+            });
+        let attrs = AttrsOwned(attrs.into_boxed_slice());
+        match result {
+            Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))),
+            None => AttrsOrCfg::Enabled { attrs },
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) struct AttrsOwned(Box<[Attr]>);
+
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct Attrs<'a>(&'a [Attr]);
+
+impl ops::Deref for Attrs<'_> {
+    type Target = [Attr];
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        self.0
+    }
+}
+
+impl Ctx<'_> {
+    #[inline]
+    pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> AttrsOrCfg {
+        AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map())
+    }
+}
+
+impl AttrsOwned {
+    #[inline]
+    pub(crate) fn as_ref(&self) -> Attrs<'_> {
+        Attrs(&self.0)
+    }
+}
+
+impl<'a> Attrs<'a> {
+    pub(crate) const EMPTY: Self = Attrs(&[]);
+
+    #[inline]
+    pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> {
+        AttrQuery { attrs: self, key }
+    }
+
+    #[inline]
+    pub(crate) fn iter(self) -> impl Iterator<Item = (AttrId, &'a Attr)> {
+        self.0.iter().enumerate().map(|(id, attr)| (AttrId::from_item_tree_index(id as u32), attr))
+    }
+
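+    /// Iterates the attributes strictly after `after` (or all of them when `after` is `None`),
+    /// keeping the original `AttrId` numbering.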
+    #[inline]
+    pub(crate) fn iter_after(
+        self,
+        after: Option<AttrId>,
+    ) -> impl Iterator<Item = (AttrId, &'a Attr)> {
+        let skip = after.map_or(0, |after| after.item_tree_index() + 1);
+        self.0[skip as usize..]
+            .iter()
+            .enumerate()
+            .map(move |(id, attr)| (AttrId::from_item_tree_index(id as u32 + skip), attr))
+    }
+
+    #[inline]
+    pub(crate) fn is_proc_macro(&self) -> bool {
+        self.by_key(sym::proc_macro).exists()
+    }
+
+    #[inline]
+    pub(crate) fn is_proc_macro_attribute(&self) -> bool {
+        self.by_key(sym::proc_macro_attribute).exists()
+    }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct AttrQuery<'attr> {
+    attrs: Attrs<'attr>,
+    key: Symbol,
+}
+
+impl<'attr> AttrQuery<'attr> {
+    #[inline]
+    pub(crate) fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
+        self.attrs().filter_map(|attr| attr.token_tree_value())
+    }
+
+    #[inline]
+    pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
+        self.attrs().find_map(|attr| attr.string_value_with_span())
+    }
+
+    #[inline]
+    pub(crate) fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
+        self.attrs().find_map(|attr| attr.string_value_unescape())
+    }
+
+    #[inline]
+    pub(crate) fn exists(self) -> bool {
+        self.attrs().next().is_some()
+    }
+
+    #[inline]
+    pub(crate) fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
+        let key = self.key;
+        self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
+    }
+}
+
+impl AttrsOrCfg {
+    #[inline]
+    pub(super) fn empty() -> Self {
+        AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) }
+    }
+
+    #[inline]
+    pub(super) fn is_empty(&self) -> bool {
+        matches!(self, AttrsOrCfg::Enabled { attrs } if attrs.as_ref().is_empty())
+    }
+}
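
The `Attrs`/`AttrQuery` pair above is a thin query layer over a plain attribute slice: `by_key` filters attributes whose path is a single matching identifier, `exists` only asks whether that filter yields anything, and `iter_after` re-bases `AttrId`s on the original slice indices when skipping already-processed attributes. A minimal standalone sketch of the filtering and offset logic, using simplified stand-in types (a `String` path instead of the interned `ModPath`, and a bare `u32` id):

```rust
// Simplified stand-ins for the rust-analyzer types used above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AttrId(u32);

#[derive(Debug)]
struct Attr {
    path: String, // stands in for the interned `ModPath`
}

#[derive(Clone, Copy)]
struct Attrs<'a>(&'a [Attr]);

impl<'a> Attrs<'a> {
    /// Iterate attributes with their ids, skipping everything up to and
    /// including `after` (mirrors `iter_after`): ids stay based on the
    /// original slice indices.
    fn iter_after(self, after: Option<AttrId>) -> impl Iterator<Item = (AttrId, &'a Attr)> {
        let skip = after.map_or(0, |AttrId(i)| i + 1);
        self.0[skip as usize..]
            .iter()
            .enumerate()
            .map(move |(i, attr)| (AttrId(i as u32 + skip), attr))
    }

    /// Filter by the attribute path's single identifier (mirrors `AttrQuery::attrs`).
    fn by_key(self, key: &'a str) -> impl Iterator<Item = &'a Attr> {
        self.0.iter().filter(move |attr| attr.path == key)
    }
}

fn main() {
    let all = [Attr { path: "proc_macro".into() }, Attr { path: "inline".into() }];
    let attrs = Attrs(&all);
    // `exists` is just "does the filtered iterator yield anything".
    assert!(attrs.by_key("proc_macro").next().is_some());
    // Skipping past the first attribute keeps ids anchored to original indices.
    let rest: Vec<_> = attrs.iter_after(Some(AttrId(0))).map(|(id, _)| id).collect();
    assert_eq!(rest, vec![AttrId(1)]);
}
```
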
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index db50e65..d8519f7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -1,8 +1,9 @@
 //! AST -> `ItemTree` lowering code.
 
-use std::{cell::OnceCell, collections::hash_map::Entry};
+use std::cell::OnceCell;
 
 use base_db::FxIndexSet;
+use cfg::CfgOptions;
 use hir_expand::{
     HirFileId,
     mod_path::PathKind,
@@ -22,18 +23,19 @@
     item_tree::{
         BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl,
         ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod,
-        ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem,
-        Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
-        VisibilityExplicitness,
+        ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct,
+        StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness,
+        attrs::AttrsOrCfg,
     },
 };
 
 pub(super) struct Ctx<'a> {
-    db: &'a dyn DefDatabase,
+    pub(super) db: &'a dyn DefDatabase,
     tree: ItemTree,
     source_ast_id_map: Arc<AstIdMap>,
     span_map: OnceCell<SpanMap>,
     file: HirFileId,
+    cfg_options: OnceCell<&'a CfgOptions>,
     top_level: Vec<ModItemId>,
     visibilities: FxIndexSet<RawVisibility>,
 }
@@ -45,12 +47,18 @@ pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId) -> Self {
             tree: ItemTree::default(),
             source_ast_id_map: db.ast_id_map(file),
             file,
+            cfg_options: OnceCell::new(),
             span_map: OnceCell::new(),
             visibilities: FxIndexSet::default(),
             top_level: Vec::new(),
         }
     }
 
+    #[inline]
+    pub(super) fn cfg_options(&self) -> &'a CfgOptions {
+        self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db))
+    }
+
     pub(super) fn span_map(&self) -> SpanMapRef<'_> {
         self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref()
     }
@@ -98,7 +106,7 @@ pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
     }
 
     pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
-        self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map());
+        self.tree.top_attrs = self.lower_attrs(block);
         self.top_level = block
             .statements()
             .filter_map(|stmt| match stmt {
@@ -144,22 +152,15 @@ fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItemId> {
             // FIXME: Handle `global_asm!()`.
             ast::Item::AsmExpr(_) => return None,
         };
-        let attrs = RawAttrs::new(self.db, item, self.span_map());
+        let attrs = self.lower_attrs(item);
         self.add_attrs(mod_item.ast_id(), attrs);
 
         Some(mod_item)
     }
 
-    fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: RawAttrs) {
+    fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: AttrsOrCfg) {
         if !attrs.is_empty() {
-            match self.tree.attrs.entry(item) {
-                Entry::Occupied(mut entry) => {
-                    *entry.get_mut() = entry.get().merge(attrs);
-                }
-                Entry::Vacant(entry) => {
-                    entry.insert(attrs);
-                }
-            }
+            self.tree.attrs.insert(item, attrs);
         }
     }
 
@@ -352,7 +353,7 @@ fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> ItemTreeAstId<Exte
                         ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
                         ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
                     };
-                    let attrs = RawAttrs::new(self.db, &item, self.span_map());
+                    let attrs = self.lower_attrs(&item);
                     self.add_attrs(mod_item.ast_id(), attrs);
                     Some(mod_item)
                 })
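
The new `cfg_options` field follows the usual `OnceCell` memoization pattern: the crate's `CfgOptions` are looked up on the first call to `Ctx::cfg_options` and reused for the rest of the lowering. A minimal sketch of that pattern, with a hypothetical `load_cfg_options` standing in for `self.file.krate(self.db).cfg_options(self.db)` and an owned value instead of the `&'a CfgOptions` reference cached by the real `Ctx`:

```rust
use std::cell::OnceCell;

struct CfgOptions {
    enabled: Vec<String>,
}

// Hypothetical stand-in for the database lookup done in the real code.
fn load_cfg_options() -> CfgOptions {
    println!("loading cfg options (runs at most once)");
    CfgOptions { enabled: vec!["test".to_owned()] }
}

struct Ctx {
    cfg_options: OnceCell<CfgOptions>,
}

impl Ctx {
    fn cfg_options(&self) -> &CfgOptions {
        // `get_or_init` runs the initializer on first access and returns the
        // cached value on every later call.
        self.cfg_options.get_or_init(load_cfg_options)
    }
}

fn main() {
    let ctx = Ctx { cfg_options: OnceCell::new() };
    assert_eq!(ctx.cfg_options().enabled.len(), 1);
    // The second call reuses the cached value; `load_cfg_options` does not run again.
    assert_eq!(ctx.cfg_options().enabled[0], "test");
}
```
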
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 94a6cce..66a2d14 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -7,8 +7,8 @@
 use crate::{
     item_tree::{
         Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree,
-        Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static,
-        Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
+        Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct,
+        Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg,
     },
     visibility::RawVisibility,
 };
@@ -85,9 +85,13 @@ fn whitespace(&mut self) {
         }
     }
 
-    fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) {
+    fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) {
+        let AttrsOrCfg::Enabled { attrs } = attrs else {
+            w!(self, "#[cfg(false)]{separated_by}");
+            return;
+        };
         let inner = if inner { "!" } else { "" };
-        for attr in &**attrs {
+        for attr in &*attrs.as_ref() {
             w!(
                 self,
                 "#{}[{}{}]{}",
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
index 91b42be..a57432f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -30,10 +30,8 @@ fn imports() {
 
 use a::{c, d::{e}};
         "#,
-        expect![[r##"
-            #![doc = " file comment"]
+        expect![[r#"
             #![no_std]
-            #![doc = " another file comment"]
 
             // AstId: ExternCrate[070B, 0]
             pub(self) extern crate self as renamed;
@@ -47,13 +45,12 @@ fn imports() {
             // AstId: Use[0000, 1]
             pub(self) use globs::*;
 
-            #[doc = " docs on import"]
             // AstId: Use[0000, 2]
             pub(self) use crate::{A, B};
 
             // AstId: Use[0000, 3]
             pub(self) use a::{c, d::{e}};
-        "##]],
+        "#]],
     );
 }
 
@@ -195,8 +192,6 @@ fn fn_in_module() {}
 mod outline;
         "#,
         expect![[r##"
-            #[doc = " outer"]
-            #[doc = " inner"]
             // AstId: Module[03AE, 0]
             pub(self) mod inline {
                 // AstId: Use[0000, 0]
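
The `#[cfg(false)]` branch added to the pretty printer above uses `let`-`else` to handle the cfg-disabled case up front: when an item's attributes were dropped because a `cfg` predicate evaluated to false, a single `#[cfg(false)]` marker is printed instead of an attribute list. A self-contained sketch of that shape, with simplified stand-in types (the real `AttrsOrCfg::CfgDisabled` also carries the failing cfg expression and the original attributes):

```rust
// Simplified stand-in for the `AttrsOrCfg` enum used by the item tree.
enum AttrsOrCfg {
    Enabled { attrs: Vec<String> },
    CfgDisabled,
}

fn print_attrs(out: &mut String, attrs: &AttrsOrCfg, separated_by: &str) {
    // Early-out for cfg-disabled items: emit only a marker.
    let AttrsOrCfg::Enabled { attrs } = attrs else {
        out.push_str("#[cfg(false)]");
        out.push_str(separated_by);
        return;
    };
    for attr in attrs {
        out.push_str(&format!("#[{attr}]{separated_by}"));
    }
}

fn main() {
    let mut out = String::new();
    print_attrs(&mut out, &AttrsOrCfg::CfgDisabled, "\n");
    print_attrs(&mut out, &AttrsOrCfg::Enabled { attrs: vec!["inline".into()] }, "\n");
    assert_eq!(out, "#[cfg(false)]\n#[inline]\n");
}
```
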
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index 91a90f6..3f2cf09 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -2,100 +2,36 @@
 //!
 //! This attribute tells the compiler about semi-built-in std library
 //! features, such as the `Fn` family of traits.
-use hir_expand::name::Name;
 use intern::{Symbol, sym};
-use rustc_hash::FxHashMap;
+use stdx::impl_from;
 
 use crate::{
     AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
     StaticId, StructId, TraitId, TypeAliasId, UnionId,
+    attrs::AttrFlags,
     db::DefDatabase,
-    expr_store::path::Path,
     nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map},
 };
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum LangItemTarget {
     EnumId(EnumId),
-    Function(FunctionId),
-    ImplDef(ImplId),
-    Static(StaticId),
-    Struct(StructId),
-    Union(UnionId),
-    TypeAlias(TypeAliasId),
-    Trait(TraitId),
-    EnumVariant(EnumVariantId),
+    FunctionId(FunctionId),
+    ImplId(ImplId),
+    StaticId(StaticId),
+    StructId(StructId),
+    UnionId(UnionId),
+    TypeAliasId(TypeAliasId),
+    TraitId(TraitId),
+    EnumVariantId(EnumVariantId),
 }
 
-impl LangItemTarget {
-    pub fn as_enum(self) -> Option<EnumId> {
-        match self {
-            LangItemTarget::EnumId(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_function(self) -> Option<FunctionId> {
-        match self {
-            LangItemTarget::Function(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_impl_def(self) -> Option<ImplId> {
-        match self {
-            LangItemTarget::ImplDef(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_static(self) -> Option<StaticId> {
-        match self {
-            LangItemTarget::Static(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_struct(self) -> Option<StructId> {
-        match self {
-            LangItemTarget::Struct(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_trait(self) -> Option<TraitId> {
-        match self {
-            LangItemTarget::Trait(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_enum_variant(self) -> Option<EnumVariantId> {
-        match self {
-            LangItemTarget::EnumVariant(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_type_alias(self) -> Option<TypeAliasId> {
-        match self {
-            LangItemTarget::TypeAlias(id) => Some(id),
-            _ => None,
-        }
-    }
-
-    pub fn as_adt(self) -> Option<AdtId> {
-        match self {
-            LangItemTarget::Union(it) => Some(it.into()),
-            LangItemTarget::EnumId(it) => Some(it.into()),
-            LangItemTarget::Struct(it) => Some(it.into()),
-            _ => None,
-        }
-    }
-}
+impl_from!(
+    EnumId, FunctionId, ImplId, StaticId, StructId, UnionId, TypeAliasId, TraitId, EnumVariantId for LangItemTarget
+);
 
 /// Salsa query. This will look for lang items in a specific crate.
-#[salsa_macros::tracked(returns(ref))]
+#[salsa_macros::tracked(returns(as_deref))]
 pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangItems>> {
     let _p = tracing::info_span!("crate_lang_items_query").entered();
 
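
Renaming each `LangItemTarget` variant after its payload type lets `stdx::impl_from!` derive the conversions that `collect_lang_item` now relies on via `T: Into<LangItemTarget>`, replacing the explicit constructor argument. A hedged sketch of what such a macro is assumed to expand to, one `From<T>` impl per listed type wrapping the value in the same-named variant (a local re-implementation for illustration, not the actual `stdx` macro):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TraitId(u32);
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct StructId(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LangItemTarget {
    TraitId(TraitId),
    StructId(StructId),
}

// Assumed expansion: one `From` impl per listed type, targeting the
// variant that shares its name.
macro_rules! impl_from {
    ($($variant:ident),* for $enum:ident) => {
        $(
            impl From<$variant> for $enum {
                fn from(it: $variant) -> $enum {
                    $enum::$variant(it)
                }
            }
        )*
    };
}

impl_from!(TraitId, StructId for LangItemTarget);

fn main() {
    // With the conversions in place, a generic `T: Into<LangItemTarget>`
    // bound replaces the explicit constructor function.
    let target: LangItemTarget = TraitId(0).into();
    assert_eq!(target, LangItemTarget::TraitId(TraitId(0)));
}
```
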
@@ -105,15 +41,11 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
 
     for (_, module_data) in crate_def_map.modules() {
         for impl_def in module_data.scope.impls() {
-            lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
+            lang_items.collect_lang_item(db, impl_def);
             for &(_, assoc) in impl_def.impl_items(db).items.iter() {
                 match assoc {
-                    AssocItemId::FunctionId(f) => {
-                        lang_items.collect_lang_item(db, f, LangItemTarget::Function)
-                    }
-                    AssocItemId::TypeAliasId(t) => {
-                        lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias)
-                    }
+                    AssocItemId::FunctionId(f) => lang_items.collect_lang_item(db, f),
+                    AssocItemId::TypeAliasId(t) => lang_items.collect_lang_item(db, t),
                     AssocItemId::ConstId(_) => (),
                 }
             }
@@ -122,62 +54,55 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
         for def in module_data.scope.declarations() {
             match def {
                 ModuleDefId::TraitId(trait_) => {
-                    lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
+                    lang_items.collect_lang_item(db, trait_);
                     TraitItems::query(db, trait_).items.iter().for_each(|&(_, assoc_id)| {
                         match assoc_id {
                             AssocItemId::FunctionId(f) => {
-                                lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+                                lang_items.collect_lang_item(db, f);
                             }
                             AssocItemId::TypeAliasId(alias) => {
-                                lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
+                                lang_items.collect_lang_item(db, alias)
                             }
                             AssocItemId::ConstId(_) => {}
                         }
                     });
                 }
                 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
-                    lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
+                    lang_items.collect_lang_item(db, e);
                     e.enum_variants(db).variants.iter().for_each(|&(id, _, _)| {
-                        lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant);
+                        lang_items.collect_lang_item(db, id);
                     });
                 }
                 ModuleDefId::AdtId(AdtId::StructId(s)) => {
-                    lang_items.collect_lang_item(db, s, LangItemTarget::Struct);
+                    lang_items.collect_lang_item(db, s);
                 }
                 ModuleDefId::AdtId(AdtId::UnionId(u)) => {
-                    lang_items.collect_lang_item(db, u, LangItemTarget::Union);
+                    lang_items.collect_lang_item(db, u);
                 }
                 ModuleDefId::FunctionId(f) => {
-                    lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+                    lang_items.collect_lang_item(db, f);
                 }
                 ModuleDefId::StaticId(s) => {
-                    lang_items.collect_lang_item(db, s, LangItemTarget::Static);
+                    lang_items.collect_lang_item(db, s);
                 }
                 ModuleDefId::TypeAliasId(t) => {
-                    lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias);
+                    lang_items.collect_lang_item(db, t);
                 }
                 _ => {}
             }
         }
     }
 
-    if lang_items.items.is_empty() { None } else { Some(Box::new(lang_items)) }
+    if lang_items.is_empty() { None } else { Some(Box::new(lang_items)) }
 }
 
-/// Salsa query. Look for a lang item, starting from the specified crate and recursively
+/// Salsa query. Looks for lang items, starting from the specified crate and recursively
 /// traversing its dependencies.
-#[salsa_macros::tracked]
-pub fn lang_item(
-    db: &dyn DefDatabase,
-    start_crate: Crate,
-    item: LangItem,
-) -> Option<LangItemTarget> {
-    let _p = tracing::info_span!("lang_item_query").entered();
-    if let Some(target) =
-        crate_lang_items(db, start_crate).as_ref().and_then(|it| it.items.get(&item).copied())
-    {
-        return Some(target);
-    }
+#[salsa_macros::tracked(returns(ref))]
+pub fn lang_items(db: &dyn DefDatabase, start_crate: Crate) -> LangItems {
+    let _p = tracing::info_span!("lang_items_query").entered();
+
+    let mut result = crate_lang_items(db, start_crate).cloned().unwrap_or_default();
 
     // Our `CrateGraph` eagerly inserts sysroot dependencies like `core` or `std` into dependencies
     // even if the target crate has `#![no_std]`, `#![no_core]` or shadowed sysroot dependencies
@@ -186,42 +111,29 @@ pub fn lang_item(
     // while nameres.
     //
     // See https://github.com/rust-lang/rust-analyzer/pull/20475 for details.
-    crate_local_def_map(db, start_crate).local(db).extern_prelude().find_map(|(_, (krate, _))| {
+    for (_, (krate, _)) in crate_local_def_map(db, start_crate).local(db).extern_prelude() {
         // Some crates declare themselves as an extern crate, like `extern crate self as core`.
         // Ignore these to prevent cycles.
-        if krate.krate == start_crate { None } else { lang_item(db, krate.krate, item) }
-    })
-}
+        if krate.krate != start_crate {
+            result.merge_prefer_self(lang_items(db, krate.krate));
+        }
+    }
 
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
-pub struct LangItems {
-    items: FxHashMap<LangItem, LangItemTarget>,
+    result
 }
 
 impl LangItems {
-    pub fn target(&self, item: LangItem) -> Option<LangItemTarget> {
-        self.items.get(&item).copied()
-    }
-
-    fn collect_lang_item<T>(
-        &mut self,
-        db: &dyn DefDatabase,
-        item: T,
-        constructor: fn(T) -> LangItemTarget,
-    ) where
-        T: Into<AttrDefId> + Copy,
+    fn collect_lang_item<T>(&mut self, db: &dyn DefDatabase, item: T)
+    where
+        T: Into<AttrDefId> + Into<LangItemTarget> + Copy,
     {
         let _p = tracing::info_span!("collect_lang_item").entered();
-        if let Some(lang_item) = lang_attr(db, item.into()) {
-            self.items.entry(lang_item).or_insert_with(|| constructor(item));
+        if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) {
+            self.assign_lang_item(lang_item, item.into());
         }
     }
 }
 
-pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
-    db.attrs(item).lang_item()
-}
-
 #[salsa::tracked(returns(as_deref))]
 pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Box<[TraitId]>> {
     let mut traits = Vec::new();
@@ -231,7 +143,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
     for (_, module_data) in crate_def_map.modules() {
         for def in module_data.scope.declarations() {
             if let ModuleDefId::TraitId(trait_) = def
-                && db.attrs(trait_.into()).has_doc_notable_trait()
+                && AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
             {
                 traits.push(trait_);
             }
@@ -249,30 +161,62 @@ pub enum GenericRequirement {
 
 macro_rules! language_item_table {
     (
-        $( $(#[$attr:meta])* $variant:ident, $module:ident :: $name:ident, $method:ident, $target:expr, $generics:expr; )*
+        $LangItems:ident =>
+        $( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $method:ident, $target:ident, $generics:expr; )*
     ) => {
-
-        /// A representation of all the valid language items in Rust.
-        #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-        pub enum LangItem {
+        #[allow(non_snake_case)] // FIXME: Should we remove this?
+        #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
+        pub struct $LangItems {
             $(
-                #[doc = concat!("The `", stringify!($name), "` lang item.")]
                 $(#[$attr])*
-                $variant,
+                pub $lang_item: Option<$target>,
             )*
         }
 
-        impl LangItem {
-            pub fn name(self) -> &'static str {
+        impl LangItems {
+            fn is_empty(&self) -> bool {
+                $( self.$lang_item.is_none() )&&*
+            }
+
+            /// Merges `self` with `other`, with preference to `self` items.
+            fn merge_prefer_self(&mut self, other: &Self) {
+                $( self.$lang_item = self.$lang_item.or(other.$lang_item); )*
+            }
+
+            fn assign_lang_item(&mut self, name: Symbol, target: LangItemTarget) {
+                match name {
+                    $(
+                        _ if name == $module::$name => {
+                            if let LangItemTarget::$target(target) = target {
+                                self.$lang_item = Some(target);
+                            }
+                        }
+                    )*
+                    _ => {}
+                }
+            }
+        }
+
+        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+        pub enum LangItemEnum {
+            $(
+                $(#[$attr])*
+                $lang_item,
+            )*
+        }
+
+        impl LangItemEnum {
+            #[inline]
+            pub fn from_lang_items(self, lang_items: &LangItems) -> Option<LangItemTarget> {
                 match self {
-                    $( LangItem::$variant => stringify!($name), )*
+                    $( LangItemEnum::$lang_item => lang_items.$lang_item.map(Into::into), )*
                 }
             }
 
-            /// Opposite of [`LangItem::name`]
-            pub fn from_symbol(sym: &Symbol) -> Option<Self> {
-                match sym {
-                    $(sym if *sym == $module::$name => Some(LangItem::$variant), )*
+            #[inline]
+            pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
+                match symbol {
+                    $( _ if *symbol == $module::$name => Some(Self::$lang_item), )*
                     _ => None,
                 }
             }
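
With the rewritten `language_item_table!`, `LangItems` is a struct with one `Option` field per lang item; `merge_prefer_self` fills in missing items from dependencies without overriding the current crate's own, and `assign_lang_item` records a target only when its kind matches the declared one. A hedged sketch of roughly what the macro generates for two entries, with newtyped ids and `&str` in place of `Symbol`:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TraitId(u32);
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct StructId(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LangItemTarget {
    TraitId(TraitId),
    StructId(StructId),
}

// One `Option` field per lang item; field names mirror the table entries,
// hence the `non_snake_case` allowance.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[allow(non_snake_case)]
struct LangItems {
    Sized: Option<TraitId>,
    OwnedBox: Option<StructId>,
}

impl LangItems {
    fn is_empty(&self) -> bool {
        self.Sized.is_none() && self.OwnedBox.is_none()
    }

    /// Merge lang items from a dependency; items already found in the
    /// current crate win (mirrors `merge_prefer_self`).
    fn merge_prefer_self(&mut self, other: &Self) {
        self.Sized = self.Sized.or(other.Sized);
        self.OwnedBox = self.OwnedBox.or(other.OwnedBox);
    }

    /// Record an item under its `#[lang = "..."]` name, ignoring targets of
    /// the wrong kind (mirrors `assign_lang_item`).
    fn assign_lang_item(&mut self, name: &str, target: LangItemTarget) {
        match name {
            "sized" => {
                if let LangItemTarget::TraitId(t) = target {
                    self.Sized = Some(t);
                }
            }
            "owned_box" => {
                if let LangItemTarget::StructId(s) = target {
                    self.OwnedBox = Some(s);
                }
            }
            _ => {}
        }
    }
}

fn main() {
    let mut this_crate = LangItems::default();
    this_crate.assign_lang_item("sized", LangItemTarget::TraitId(TraitId(1)));

    let mut dep = LangItems::default();
    dep.assign_lang_item("sized", LangItemTarget::TraitId(TraitId(7)));
    dep.assign_lang_item("owned_box", LangItemTarget::StructId(StructId(3)));

    // The current crate's `Sized` wins; the dependency fills in what's missing.
    this_crate.merge_prefer_self(&dep);
    assert_eq!(this_crate.Sized, Some(TraitId(1)));
    assert_eq!(this_crate.OwnedBox, Some(StructId(3)));
    assert!(!this_crate.is_empty());
}
```
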
@@ -280,142 +224,101 @@ pub fn from_symbol(sym: &Symbol) -> Option<Self> {
     }
 }
 
-impl LangItem {
-    pub fn resolve_function(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<FunctionId> {
-        lang_item(db, start_crate, self).and_then(|t| t.as_function())
-    }
-
-    pub fn resolve_trait(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<TraitId> {
-        lang_item(db, start_crate, self).and_then(|t| t.as_trait())
-    }
-
-    pub fn resolve_adt(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<AdtId> {
-        lang_item(db, start_crate, self).and_then(|t| t.as_adt())
-    }
-
-    pub fn resolve_enum(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<EnumId> {
-        lang_item(db, start_crate, self).and_then(|t| t.as_enum())
-    }
-
-    pub fn resolve_type_alias(
-        self,
-        db: &dyn DefDatabase,
-        start_crate: Crate,
-    ) -> Option<TypeAliasId> {
-        lang_item(db, start_crate, self).and_then(|t| t.as_type_alias())
-    }
-
-    /// Opposite of [`LangItem::name`]
-    pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> {
-        Self::from_symbol(name.symbol())
-    }
-
-    pub fn path(&self, db: &dyn DefDatabase, start_crate: Crate) -> Option<Path> {
-        let t = lang_item(db, start_crate, *self)?;
-        Some(Path::LangItem(t, None))
-    }
-
-    pub fn ty_rel_path(&self, db: &dyn DefDatabase, start_crate: Crate, seg: Name) -> Option<Path> {
-        let t = lang_item(db, start_crate, *self)?;
-        Some(Path::LangItem(t, Some(seg)))
-    }
-}
-
-language_item_table! {
+language_item_table! { LangItems =>
 //  Variant name,            Name,                     Getter method name,         Target                  Generic requirements;
-    Sized,                   sym::sized,               sized_trait,                Target::Trait,          GenericRequirement::Exact(0);
-    MetaSized,               sym::meta_sized,          sized_trait,                Target::Trait,          GenericRequirement::Exact(0);
-    PointeeSized,            sym::pointee_sized,       sized_trait,                Target::Trait,          GenericRequirement::Exact(0);
-    Unsize,                  sym::unsize,              unsize_trait,               Target::Trait,          GenericRequirement::Minimum(1);
+    Sized,                   sym::sized,               sized_trait,                TraitId,                GenericRequirement::Exact(0);
+    MetaSized,               sym::meta_sized,          sized_trait,                TraitId,                GenericRequirement::Exact(0);
+    PointeeSized,            sym::pointee_sized,       sized_trait,                TraitId,                GenericRequirement::Exact(0);
+    Unsize,                  sym::unsize,              unsize_trait,               TraitId,                GenericRequirement::Minimum(1);
     /// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
-    StructuralPeq,           sym::structural_peq,      structural_peq_trait,       Target::Trait,          GenericRequirement::None;
+    StructuralPeq,           sym::structural_peq,      structural_peq_trait,       TraitId,                GenericRequirement::None;
     /// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize).
-    StructuralTeq,           sym::structural_teq,      structural_teq_trait,       Target::Trait,          GenericRequirement::None;
-    Copy,                    sym::copy,                copy_trait,                 Target::Trait,          GenericRequirement::Exact(0);
-    Clone,                   sym::clone,               clone_trait,                Target::Trait,          GenericRequirement::None;
-    Sync,                    sym::sync,                sync_trait,                 Target::Trait,          GenericRequirement::Exact(0);
-    DiscriminantKind,        sym::discriminant_kind,   discriminant_kind_trait,    Target::Trait,          GenericRequirement::None;
+    StructuralTeq,           sym::structural_teq,      structural_teq_trait,       TraitId,                GenericRequirement::None;
+    Copy,                    sym::copy,                copy_trait,                 TraitId,                GenericRequirement::Exact(0);
+    Clone,                   sym::clone,               clone_trait,                TraitId,                GenericRequirement::None;
+    Sync,                    sym::sync,                sync_trait,                 TraitId,                GenericRequirement::Exact(0);
+    DiscriminantKind,        sym::discriminant_kind,   discriminant_kind_trait,    TraitId,                GenericRequirement::None;
     /// The associated item of the [`DiscriminantKind`] trait.
-    Discriminant,            sym::discriminant_type,   discriminant_type,          Target::AssocTy,        GenericRequirement::None;
+    Discriminant,            sym::discriminant_type,   discriminant_type,          TypeAliasId,            GenericRequirement::None;
 
-    PointeeTrait,            sym::pointee_trait,       pointee_trait,              Target::Trait,          GenericRequirement::None;
-    Metadata,                sym::metadata_type,       metadata_type,              Target::AssocTy,        GenericRequirement::None;
-    DynMetadata,             sym::dyn_metadata,        dyn_metadata,               Target::Struct,         GenericRequirement::None;
+    PointeeTrait,            sym::pointee_trait,       pointee_trait,              TraitId,                GenericRequirement::None;
+    Metadata,                sym::metadata_type,       metadata_type,              TypeAliasId,            GenericRequirement::None;
+    DynMetadata,             sym::dyn_metadata,        dyn_metadata,               StructId,               GenericRequirement::None;
 
-    Freeze,                  sym::freeze,              freeze_trait,               Target::Trait,          GenericRequirement::Exact(0);
+    Freeze,                  sym::freeze,              freeze_trait,               TraitId,                GenericRequirement::Exact(0);
 
-    FnPtrTrait,              sym::fn_ptr_trait,        fn_ptr_trait,               Target::Trait,          GenericRequirement::Exact(0);
-    FnPtrAddr,               sym::fn_ptr_addr,         fn_ptr_addr,                Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    FnPtrTrait,              sym::fn_ptr_trait,        fn_ptr_trait,               TraitId,                GenericRequirement::Exact(0);
+    FnPtrAddr,               sym::fn_ptr_addr,         fn_ptr_addr,                FunctionId,             GenericRequirement::None;
 
-    Drop,                    sym::drop,                drop_trait,                 Target::Trait,          GenericRequirement::None;
-    Destruct,                sym::destruct,            destruct_trait,             Target::Trait,          GenericRequirement::None;
+    Drop,                    sym::drop,                drop_trait,                 TraitId,                GenericRequirement::None;
+    Destruct,                sym::destruct,            destruct_trait,             TraitId,                GenericRequirement::None;
 
-    CoerceUnsized,           sym::coerce_unsized,      coerce_unsized_trait,       Target::Trait,          GenericRequirement::Minimum(1);
-    DispatchFromDyn,         sym::dispatch_from_dyn,   dispatch_from_dyn_trait,    Target::Trait,          GenericRequirement::Minimum(1);
+    CoerceUnsized,           sym::coerce_unsized,      coerce_unsized_trait,       TraitId,                GenericRequirement::Minimum(1);
+    DispatchFromDyn,         sym::dispatch_from_dyn,   dispatch_from_dyn_trait,    TraitId,                GenericRequirement::Minimum(1);
 
     // language items relating to transmutability
-    TransmuteOpts,           sym::transmute_opts,      transmute_opts,             Target::Struct,         GenericRequirement::Exact(0);
-    TransmuteTrait,          sym::transmute_trait,     transmute_trait,            Target::Trait,          GenericRequirement::Exact(3);
+    TransmuteOpts,           sym::transmute_opts,      transmute_opts,             StructId,               GenericRequirement::Exact(0);
+    TransmuteTrait,          sym::transmute_trait,     transmute_trait,            TraitId,                GenericRequirement::Exact(3);
 
-    Add,                     sym::add,                 add_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    Sub,                     sym::sub,                 sub_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    Mul,                     sym::mul,                 mul_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    Div,                     sym::div,                 div_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    Rem,                     sym::rem,                 rem_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    Neg,                     sym::neg,                 neg_trait,                  Target::Trait,          GenericRequirement::Exact(0);
-    Not,                     sym::not,                 not_trait,                  Target::Trait,          GenericRequirement::Exact(0);
-    BitXor,                  sym::bitxor,              bitxor_trait,               Target::Trait,          GenericRequirement::Exact(1);
-    BitAnd,                  sym::bitand,              bitand_trait,               Target::Trait,          GenericRequirement::Exact(1);
-    BitOr,                   sym::bitor,               bitor_trait,                Target::Trait,          GenericRequirement::Exact(1);
-    Shl,                     sym::shl,                 shl_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    Shr,                     sym::shr,                 shr_trait,                  Target::Trait,          GenericRequirement::Exact(1);
-    AddAssign,               sym::add_assign,          add_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    SubAssign,               sym::sub_assign,          sub_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    MulAssign,               sym::mul_assign,          mul_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    DivAssign,               sym::div_assign,          div_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    RemAssign,               sym::rem_assign,          rem_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    BitXorAssign,            sym::bitxor_assign,       bitxor_assign_trait,        Target::Trait,          GenericRequirement::Exact(1);
-    BitAndAssign,            sym::bitand_assign,       bitand_assign_trait,        Target::Trait,          GenericRequirement::Exact(1);
-    BitOrAssign,             sym::bitor_assign,        bitor_assign_trait,         Target::Trait,          GenericRequirement::Exact(1);
-    ShlAssign,               sym::shl_assign,          shl_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    ShrAssign,               sym::shr_assign,          shr_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
-    Index,                   sym::index,               index_trait,                Target::Trait,          GenericRequirement::Exact(1);
-    IndexMut,                sym::index_mut,           index_mut_trait,            Target::Trait,          GenericRequirement::Exact(1);
+    Add,                     sym::add,                 add_trait,                  TraitId,                GenericRequirement::Exact(1);
+    Sub,                     sym::sub,                 sub_trait,                  TraitId,                GenericRequirement::Exact(1);
+    Mul,                     sym::mul,                 mul_trait,                  TraitId,                GenericRequirement::Exact(1);
+    Div,                     sym::div,                 div_trait,                  TraitId,                GenericRequirement::Exact(1);
+    Rem,                     sym::rem,                 rem_trait,                  TraitId,                GenericRequirement::Exact(1);
+    Neg,                     sym::neg,                 neg_trait,                  TraitId,                GenericRequirement::Exact(0);
+    Not,                     sym::not,                 not_trait,                  TraitId,                GenericRequirement::Exact(0);
+    BitXor,                  sym::bitxor,              bitxor_trait,               TraitId,                GenericRequirement::Exact(1);
+    BitAnd,                  sym::bitand,              bitand_trait,               TraitId,                GenericRequirement::Exact(1);
+    BitOr,                   sym::bitor,               bitor_trait,                TraitId,                GenericRequirement::Exact(1);
+    Shl,                     sym::shl,                 shl_trait,                  TraitId,                GenericRequirement::Exact(1);
+    Shr,                     sym::shr,                 shr_trait,                  TraitId,                GenericRequirement::Exact(1);
+    AddAssign,               sym::add_assign,          add_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    SubAssign,               sym::sub_assign,          sub_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    MulAssign,               sym::mul_assign,          mul_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    DivAssign,               sym::div_assign,          div_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    RemAssign,               sym::rem_assign,          rem_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    BitXorAssign,            sym::bitxor_assign,       bitxor_assign_trait,        TraitId,                GenericRequirement::Exact(1);
+    BitAndAssign,            sym::bitand_assign,       bitand_assign_trait,        TraitId,                GenericRequirement::Exact(1);
+    BitOrAssign,             sym::bitor_assign,        bitor_assign_trait,         TraitId,                GenericRequirement::Exact(1);
+    ShlAssign,               sym::shl_assign,          shl_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    ShrAssign,               sym::shr_assign,          shr_assign_trait,           TraitId,                GenericRequirement::Exact(1);
+    Index,                   sym::index,               index_trait,                TraitId,                GenericRequirement::Exact(1);
+    IndexMut,                sym::index_mut,           index_mut_trait,            TraitId,                GenericRequirement::Exact(1);
 
-    UnsafeCell,              sym::unsafe_cell,         unsafe_cell_type,           Target::Struct,         GenericRequirement::None;
-    UnsafePinned,            sym::unsafe_pinned,       unsafe_pinned_type,         Target::Struct,         GenericRequirement::None;
-    VaList,                  sym::va_list,             va_list,                    Target::Struct,         GenericRequirement::None;
+    UnsafeCell,              sym::unsafe_cell,         unsafe_cell_type,           StructId,               GenericRequirement::None;
+    UnsafePinned,            sym::unsafe_pinned,       unsafe_pinned_type,         StructId,               GenericRequirement::None;
+    VaList,                  sym::va_list,             va_list,                    StructId,               GenericRequirement::None;
 
-    Deref,                   sym::deref,               deref_trait,                Target::Trait,          GenericRequirement::Exact(0);
-    DerefMut,                sym::deref_mut,           deref_mut_trait,            Target::Trait,          GenericRequirement::Exact(0);
-    DerefTarget,             sym::deref_target,        deref_target,               Target::AssocTy,        GenericRequirement::None;
-    Receiver,                sym::receiver,            receiver_trait,             Target::Trait,          GenericRequirement::None;
-    ReceiverTarget,           sym::receiver_target,     receiver_target,            Target::AssocTy,        GenericRequirement::None;
+    Deref,                   sym::deref,               deref_trait,                TraitId,                GenericRequirement::Exact(0);
+    DerefMut,                sym::deref_mut,           deref_mut_trait,            TraitId,                GenericRequirement::Exact(0);
+    DerefTarget,             sym::deref_target,        deref_target,               TypeAliasId,            GenericRequirement::None;
+    Receiver,                sym::receiver,            receiver_trait,             TraitId,                GenericRequirement::None;
+    ReceiverTarget,           sym::receiver_target,     receiver_target,           TypeAliasId,            GenericRequirement::None;
 
-    Fn,                      sym::fn_,                 fn_trait,                   Target::Trait,          GenericRequirement::Exact(1);
-    FnMut,                   sym::fn_mut,              fn_mut_trait,               Target::Trait,          GenericRequirement::Exact(1);
-    FnOnce,                  sym::fn_once,             fn_once_trait,              Target::Trait,          GenericRequirement::Exact(1);
-    AsyncFn,                 sym::async_fn,            async_fn_trait,             Target::Trait,          GenericRequirement::Exact(1);
-    AsyncFnMut,              sym::async_fn_mut,        async_fn_mut_trait,         Target::Trait,          GenericRequirement::Exact(1);
-    AsyncFnOnce,             sym::async_fn_once,       async_fn_once_trait,        Target::Trait,          GenericRequirement::Exact(1);
+    Fn,                      sym::fn_,                 fn_trait,                   TraitId,                GenericRequirement::Exact(1);
+    FnMut,                   sym::fn_mut,              fn_mut_trait,               TraitId,                GenericRequirement::Exact(1);
+    FnOnce,                  sym::fn_once,             fn_once_trait,              TraitId,                GenericRequirement::Exact(1);
+    AsyncFn,                 sym::async_fn,            async_fn_trait,             TraitId,                GenericRequirement::Exact(1);
+    AsyncFnMut,              sym::async_fn_mut,        async_fn_mut_trait,         TraitId,                GenericRequirement::Exact(1);
+    AsyncFnOnce,             sym::async_fn_once,       async_fn_once_trait,        TraitId,                GenericRequirement::Exact(1);
 
-    CallRefFuture,           sym::call_ref_future,     call_ref_future_ty,         Target::AssocTy,        GenericRequirement::None;
-    CallOnceFuture,          sym::call_once_future,    call_once_future_ty,        Target::AssocTy,        GenericRequirement::None;
-    AsyncFnOnceOutput,       sym::async_fn_once_output, async_fn_once_output_ty,   Target::AssocTy,        GenericRequirement::None;
+    CallRefFuture,           sym::call_ref_future,     call_ref_future_ty,         TypeAliasId,            GenericRequirement::None;
+    CallOnceFuture,          sym::call_once_future,    call_once_future_ty,        TypeAliasId,            GenericRequirement::None;
+    AsyncFnOnceOutput,       sym::async_fn_once_output, async_fn_once_output_ty,   TypeAliasId,            GenericRequirement::None;
 
-    FnOnceOutput,            sym::fn_once_output,      fn_once_output,             Target::AssocTy,        GenericRequirement::None;
+    FnOnceOutput,            sym::fn_once_output,      fn_once_output,             TypeAliasId,            GenericRequirement::None;
 
-    Future,                  sym::future_trait,        future_trait,               Target::Trait,          GenericRequirement::Exact(0);
-    CoroutineState,          sym::coroutine_state,     coroutine_state,            Target::Enum,           GenericRequirement::None;
-    Coroutine,               sym::coroutine,           coroutine_trait,            Target::Trait,          GenericRequirement::Minimum(1);
-    CoroutineReturn,         sym::coroutine_return,    coroutine_return_ty,        Target::AssocTy,        GenericRequirement::None;
-    CoroutineYield,          sym::coroutine_yield,     coroutine_yield_ty,         Target::AssocTy,        GenericRequirement::None;
-    Unpin,                   sym::unpin,               unpin_trait,                Target::Trait,          GenericRequirement::None;
-    Pin,                     sym::pin,                 pin_type,                   Target::Struct,         GenericRequirement::None;
+    Future,                  sym::future_trait,        future_trait,               TraitId,                GenericRequirement::Exact(0);
+    CoroutineState,          sym::coroutine_state,     coroutine_state,            EnumId,                 GenericRequirement::None;
+    Coroutine,               sym::coroutine,           coroutine_trait,            TraitId,                GenericRequirement::Minimum(1);
+    CoroutineReturn,         sym::coroutine_return,    coroutine_return_ty,        TypeAliasId,            GenericRequirement::None;
+    CoroutineYield,          sym::coroutine_yield,     coroutine_yield_ty,         TypeAliasId,            GenericRequirement::None;
+    Unpin,                   sym::unpin,               unpin_trait,                TraitId,                GenericRequirement::None;
+    Pin,                     sym::pin,                 pin_type,                   StructId,               GenericRequirement::None;
 
-    PartialEq,               sym::eq,                  eq_trait,                   Target::Trait,          GenericRequirement::Exact(1);
-    PartialOrd,              sym::partial_ord,         partial_ord_trait,          Target::Trait,          GenericRequirement::Exact(1);
-    CVoid,                   sym::c_void,              c_void,                     Target::Enum,           GenericRequirement::None;
+    PartialEq,               sym::eq,                  eq_trait,                   TraitId,                GenericRequirement::Exact(1);
+    PartialOrd,              sym::partial_ord,         partial_ord_trait,          TraitId,                GenericRequirement::Exact(1);
+    CVoid,                   sym::c_void,              c_void,                     EnumId,                 GenericRequirement::None;
 
     // A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and
     // various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays.
@@ -424,107 +327,107 @@ pub fn ty_rel_path(&self, db: &dyn DefDatabase, start_crate: Crate, seg: Name) -
     // in the sense that a crate is not required to have it defined to use it, but a final product
     // is required to define it somewhere. Additionally, there are restrictions on crates that use
     // a weak lang item, but do not have it defined.
-    Panic,                   sym::panic,               panic_fn,                   Target::Fn,             GenericRequirement::Exact(0);
-    PanicNounwind,           sym::panic_nounwind,      panic_nounwind,             Target::Fn,             GenericRequirement::Exact(0);
-    PanicFmt,                sym::panic_fmt,           panic_fmt,                  Target::Fn,             GenericRequirement::None;
-    PanicDisplay,            sym::panic_display,       panic_display,              Target::Fn,             GenericRequirement::None;
-    ConstPanicFmt,           sym::const_panic_fmt,     const_panic_fmt,            Target::Fn,             GenericRequirement::None;
-    PanicBoundsCheck,        sym::panic_bounds_check,  panic_bounds_check_fn,      Target::Fn,             GenericRequirement::Exact(0);
-    PanicMisalignedPointerDereference,        sym::panic_misaligned_pointer_dereference,  panic_misaligned_pointer_dereference_fn,      Target::Fn,             GenericRequirement::Exact(0);
-    PanicInfo,               sym::panic_info,          panic_info,                 Target::Struct,         GenericRequirement::None;
-    PanicLocation,           sym::panic_location,      panic_location,             Target::Struct,         GenericRequirement::None;
-    PanicImpl,               sym::panic_impl,          panic_impl,                 Target::Fn,             GenericRequirement::None;
-    PanicCannotUnwind,       sym::panic_cannot_unwind, panic_cannot_unwind,        Target::Fn,             GenericRequirement::Exact(0);
-    PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, Target::Fn, GenericRequirement::None;
+    Panic,                   sym::panic,               panic_fn,                   FunctionId,             GenericRequirement::Exact(0);
+    PanicNounwind,           sym::panic_nounwind,      panic_nounwind,             FunctionId,             GenericRequirement::Exact(0);
+    PanicFmt,                sym::panic_fmt,           panic_fmt,                  FunctionId,             GenericRequirement::None;
+    PanicDisplay,            sym::panic_display,       panic_display,              FunctionId,             GenericRequirement::None;
+    ConstPanicFmt,           sym::const_panic_fmt,     const_panic_fmt,            FunctionId,             GenericRequirement::None;
+    PanicBoundsCheck,        sym::panic_bounds_check,  panic_bounds_check_fn,      FunctionId,             GenericRequirement::Exact(0);
+    PanicMisalignedPointerDereference,        sym::panic_misaligned_pointer_dereference,  panic_misaligned_pointer_dereference_fn,      FunctionId,             GenericRequirement::Exact(0);
+    PanicInfo,               sym::panic_info,          panic_info,                 StructId,               GenericRequirement::None;
+    PanicLocation,           sym::panic_location,      panic_location,             StructId,               GenericRequirement::None;
+    PanicImpl,               sym::panic_impl,          panic_impl,                 FunctionId,             GenericRequirement::None;
+    PanicCannotUnwind,       sym::panic_cannot_unwind, panic_cannot_unwind,        FunctionId,             GenericRequirement::Exact(0);
+    PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, FunctionId, GenericRequirement::None;
     /// libstd panic entry point. Necessary for const eval to be able to catch it
-    BeginPanic,              sym::begin_panic,         begin_panic_fn,             Target::Fn,             GenericRequirement::None;
+    BeginPanic,              sym::begin_panic,         begin_panic_fn,             FunctionId,             GenericRequirement::None;
 
     // Lang items needed for `format_args!()`.
-    FormatAlignment,         sym::format_alignment,    format_alignment,           Target::Enum,           GenericRequirement::None;
-    FormatArgument,          sym::format_argument,     format_argument,            Target::Struct,         GenericRequirement::None;
-    FormatArguments,         sym::format_arguments,    format_arguments,           Target::Struct,         GenericRequirement::None;
-    FormatCount,             sym::format_count,        format_count,               Target::Enum,           GenericRequirement::None;
-    FormatPlaceholder,       sym::format_placeholder,  format_placeholder,         Target::Struct,         GenericRequirement::None;
-    FormatUnsafeArg,         sym::format_unsafe_arg,   format_unsafe_arg,          Target::Struct,         GenericRequirement::None;
+    FormatAlignment,         sym::format_alignment,    format_alignment,           EnumId,                 GenericRequirement::None;
+    FormatArgument,          sym::format_argument,     format_argument,            StructId,               GenericRequirement::None;
+    FormatArguments,         sym::format_arguments,    format_arguments,           StructId,               GenericRequirement::None;
+    FormatCount,             sym::format_count,        format_count,               EnumId,                 GenericRequirement::None;
+    FormatPlaceholder,       sym::format_placeholder,  format_placeholder,         StructId,               GenericRequirement::None;
+    FormatUnsafeArg,         sym::format_unsafe_arg,   format_unsafe_arg,          StructId,               GenericRequirement::None;
 
-    ExchangeMalloc,          sym::exchange_malloc,     exchange_malloc_fn,         Target::Fn,             GenericRequirement::None;
-    BoxFree,                 sym::box_free,            box_free_fn,                Target::Fn,             GenericRequirement::Minimum(1);
-    DropInPlace,             sym::drop_in_place,       drop_in_place_fn,           Target::Fn,             GenericRequirement::Minimum(1);
-    AllocLayout,             sym::alloc_layout,        alloc_layout,               Target::Struct,         GenericRequirement::None;
+    ExchangeMalloc,          sym::exchange_malloc,     exchange_malloc_fn,         FunctionId,             GenericRequirement::None;
+    BoxFree,                 sym::box_free,            box_free_fn,                FunctionId,             GenericRequirement::Minimum(1);
+    DropInPlace,             sym::drop_in_place,       drop_in_place_fn,           FunctionId,             GenericRequirement::Minimum(1);
+    AllocLayout,             sym::alloc_layout,        alloc_layout,               StructId,               GenericRequirement::None;
 
-    Start,                   sym::start,               start_fn,                   Target::Fn,             GenericRequirement::Exact(1);
+    Start,                   sym::start,               start_fn,                   FunctionId,             GenericRequirement::Exact(1);
 
-    EhPersonality,           sym::eh_personality,      eh_personality,             Target::Fn,             GenericRequirement::None;
-    EhCatchTypeinfo,         sym::eh_catch_typeinfo,   eh_catch_typeinfo,          Target::Static,         GenericRequirement::None;
+    EhPersonality,           sym::eh_personality,      eh_personality,             FunctionId,             GenericRequirement::None;
+    EhCatchTypeinfo,         sym::eh_catch_typeinfo,   eh_catch_typeinfo,          StaticId,               GenericRequirement::None;
 
-    OwnedBox,                sym::owned_box,           owned_box,                  Target::Struct,         GenericRequirement::Minimum(1);
+    OwnedBox,                sym::owned_box,           owned_box,                  StructId,               GenericRequirement::Minimum(1);
 
-    PhantomData,             sym::phantom_data,        phantom_data,               Target::Struct,         GenericRequirement::Exact(1);
+    PhantomData,             sym::phantom_data,        phantom_data,               StructId,               GenericRequirement::Exact(1);
 
-    ManuallyDrop,            sym::manually_drop,       manually_drop,              Target::Struct,         GenericRequirement::None;
+    ManuallyDrop,            sym::manually_drop,       manually_drop,              StructId,               GenericRequirement::None;
 
-    MaybeUninit,             sym::maybe_uninit,        maybe_uninit,               Target::Union,          GenericRequirement::None;
+    MaybeUninit,             sym::maybe_uninit,        maybe_uninit,               UnionId,                GenericRequirement::None;
 
     /// Align offset for stride != 1; must not panic.
-    AlignOffset,             sym::align_offset,        align_offset_fn,            Target::Fn,             GenericRequirement::None;
+    AlignOffset,             sym::align_offset,        align_offset_fn,            FunctionId,             GenericRequirement::None;
 
-    Termination,             sym::termination,         termination,                Target::Trait,          GenericRequirement::None;
+    Termination,             sym::termination,         termination,                TraitId,                GenericRequirement::None;
 
-    Try,                     sym::Try,                 try_trait,                  Target::Trait,          GenericRequirement::None;
+    Try,                     sym::Try,                 try_trait,                  TraitId,                GenericRequirement::None;
 
-    Tuple,                   sym::tuple_trait,         tuple_trait,                Target::Trait,          GenericRequirement::Exact(0);
+    Tuple,                   sym::tuple_trait,         tuple_trait,                TraitId,                GenericRequirement::Exact(0);
 
-    SliceLen,                sym::slice_len_fn,        slice_len_fn,               Target::Method(MethodKind::Inherent), GenericRequirement::None;
+    SliceLen,                sym::slice_len_fn,        slice_len_fn,               FunctionId,             GenericRequirement::None;
 
     // Language items from AST lowering
-    TryTraitFromResidual,    sym::from_residual,       from_residual_fn,           Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
-    TryTraitFromOutput,      sym::from_output,         from_output_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
-    TryTraitBranch,          sym::branch,              branch_fn,                  Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
-    TryTraitFromYeet,        sym::from_yeet,           from_yeet_fn,               Target::Fn,             GenericRequirement::None;
+    TryTraitFromResidual,    sym::from_residual,       from_residual_fn,           FunctionId,             GenericRequirement::None;
+    TryTraitFromOutput,      sym::from_output,         from_output_fn,             FunctionId,             GenericRequirement::None;
+    TryTraitBranch,          sym::branch,              branch_fn,                  FunctionId,             GenericRequirement::None;
+    TryTraitFromYeet,        sym::from_yeet,           from_yeet_fn,               FunctionId,             GenericRequirement::None;
 
-    PointerLike,             sym::pointer_like,        pointer_like,               Target::Trait,          GenericRequirement::Exact(0);
+    PointerLike,             sym::pointer_like,        pointer_like,               TraitId,                GenericRequirement::Exact(0);
 
-    ConstParamTy,            sym::const_param_ty,      const_param_ty_trait,       Target::Trait,          GenericRequirement::Exact(0);
+    ConstParamTy,            sym::const_param_ty,      const_param_ty_trait,       TraitId,                GenericRequirement::Exact(0);
 
-    Poll,                    sym::Poll,                poll,                       Target::Enum,           GenericRequirement::None;
-    PollReady,               sym::Ready,               poll_ready_variant,         Target::Variant,        GenericRequirement::None;
-    PollPending,             sym::Pending,             poll_pending_variant,       Target::Variant,        GenericRequirement::None;
+    Poll,                    sym::Poll,                poll,                       EnumId,                 GenericRequirement::None;
+    PollReady,               sym::Ready,               poll_ready_variant,         EnumVariantId,          GenericRequirement::None;
+    PollPending,             sym::Pending,             poll_pending_variant,       EnumVariantId,          GenericRequirement::None;
 
     // FIXME(swatinem): the following lang items are used for async lowering and
     // should become obsolete eventually.
-    ResumeTy,                sym::ResumeTy,            resume_ty,                  Target::Struct,         GenericRequirement::None;
-    GetContext,              sym::get_context,         get_context_fn,             Target::Fn,             GenericRequirement::None;
+    ResumeTy,                sym::ResumeTy,            resume_ty,                  StructId,               GenericRequirement::None;
+    GetContext,              sym::get_context,         get_context_fn,             FunctionId,             GenericRequirement::None;
 
-    Context,                 sym::Context,             context,                    Target::Struct,         GenericRequirement::None;
-    FuturePoll,              sym::poll,                future_poll_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
-    FutureOutput,            sym::future_output,       future_output,              Target::TypeAlias,      GenericRequirement::None;
+    Context,                 sym::Context,             context,                    StructId,               GenericRequirement::None;
+    FuturePoll,              sym::poll,                future_poll_fn,             FunctionId,             GenericRequirement::None;
+    FutureOutput,            sym::future_output,       future_output,              TypeAliasId,            GenericRequirement::None;
 
-    Option,                  sym::Option,              option_type,                Target::Enum,           GenericRequirement::None;
-    OptionSome,              sym::Some,                option_some_variant,        Target::Variant,        GenericRequirement::None;
-    OptionNone,              sym::None,                option_none_variant,        Target::Variant,        GenericRequirement::None;
+    Option,                  sym::Option,              option_type,                EnumId,                 GenericRequirement::None;
+    OptionSome,              sym::Some,                option_some_variant,        EnumVariantId,          GenericRequirement::None;
+    OptionNone,              sym::None,                option_none_variant,        EnumVariantId,          GenericRequirement::None;
 
-    ResultOk,                sym::Ok,                  result_ok_variant,          Target::Variant,        GenericRequirement::None;
-    ResultErr,               sym::Err,                 result_err_variant,         Target::Variant,        GenericRequirement::None;
+    ResultOk,                sym::Ok,                  result_ok_variant,          EnumVariantId,          GenericRequirement::None;
+    ResultErr,               sym::Err,                 result_err_variant,         EnumVariantId,          GenericRequirement::None;
 
-    ControlFlowContinue,     sym::Continue,            cf_continue_variant,        Target::Variant,        GenericRequirement::None;
-    ControlFlowBreak,        sym::Break,               cf_break_variant,           Target::Variant,        GenericRequirement::None;
+    ControlFlowContinue,     sym::Continue,            cf_continue_variant,        EnumVariantId,          GenericRequirement::None;
+    ControlFlowBreak,        sym::Break,               cf_break_variant,           EnumVariantId,          GenericRequirement::None;
 
-    IntoFutureIntoFuture,    sym::into_future,         into_future_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
-    IntoIterIntoIter,        sym::into_iter,           into_iter_fn,               Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
-    IteratorNext,            sym::next,                next_fn,                    Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None;
-    Iterator,                sym::iterator,            iterator,                   Target::Trait,           GenericRequirement::None;
+    IntoFutureIntoFuture,    sym::into_future,         into_future_fn,             FunctionId,             GenericRequirement::None;
+    IntoIterIntoIter,        sym::into_iter,           into_iter_fn,               FunctionId,             GenericRequirement::None;
+    IteratorNext,            sym::next,                next_fn,                    FunctionId,             GenericRequirement::None;
+    Iterator,                sym::iterator,            iterator,                   TraitId,                GenericRequirement::None;
 
-    PinNewUnchecked,         sym::new_unchecked,       new_unchecked_fn,           Target::Method(MethodKind::Inherent), GenericRequirement::None;
+    PinNewUnchecked,         sym::new_unchecked,       new_unchecked_fn,           FunctionId,             GenericRequirement::None;
 
-    RangeFrom,               sym::RangeFrom,           range_from_struct,          Target::Struct,         GenericRequirement::None;
-    RangeFull,               sym::RangeFull,           range_full_struct,          Target::Struct,         GenericRequirement::None;
-    RangeInclusiveStruct,    sym::RangeInclusive,      range_inclusive_struct,     Target::Struct,         GenericRequirement::None;
-    RangeInclusiveNew,       sym::range_inclusive_new, range_inclusive_new_method, Target::Method(MethodKind::Inherent), GenericRequirement::None;
-    Range,                   sym::Range,               range_struct,               Target::Struct,         GenericRequirement::None;
-    RangeToInclusive,        sym::RangeToInclusive,    range_to_inclusive_struct,  Target::Struct,         GenericRequirement::None;
-    RangeTo,                 sym::RangeTo,             range_to_struct,            Target::Struct,         GenericRequirement::None;
+    RangeFrom,               sym::RangeFrom,           range_from_struct,          StructId,               GenericRequirement::None;
+    RangeFull,               sym::RangeFull,           range_full_struct,          StructId,               GenericRequirement::None;
+    RangeInclusiveStruct,    sym::RangeInclusive,      range_inclusive_struct,     StructId,               GenericRequirement::None;
+    RangeInclusiveNew,       sym::range_inclusive_new, range_inclusive_new_method, FunctionId,             GenericRequirement::None;
+    Range,                   sym::Range,               range_struct,               StructId,               GenericRequirement::None;
+    RangeToInclusive,        sym::RangeToInclusive,    range_to_inclusive_struct,  StructId,               GenericRequirement::None;
+    RangeTo,                 sym::RangeTo,             range_to_struct,            StructId,               GenericRequirement::None;
 
-    String,                  sym::String,              string,                     Target::Struct,         GenericRequirement::None;
-    CStr,                    sym::CStr,                c_str,                      Target::Struct,         GenericRequirement::None;
-    Ordering,                sym::Ordering,            ordering,                   Target::Enum,           GenericRequirement::None;
+    String,                  sym::String,              string,                     StructId,               GenericRequirement::None;
+    CStr,                    sym::CStr,                c_str,                      StructId,               GenericRequirement::None;
+    Ordering,                sym::Ordering,            ordering,                   EnumId,                 GenericRequirement::None;
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 52d9991..ad247e0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -19,7 +19,7 @@
 
 pub mod db;
 
-pub mod attr;
+pub mod attrs;
 pub mod builtin_type;
 pub mod item_scope;
 pub mod per_ns;
@@ -45,7 +45,7 @@
 pub mod import_map;
 pub mod visibility;
 
-use intern::{Interned, Symbol, sym};
+use intern::{Interned, Symbol};
 pub use rustc_abi as layout;
 use thin_vec::ThinVec;
 use triomphe::Arc;
@@ -80,7 +80,7 @@
 pub use hir_expand::{Intern, Lookup, tt};
 
 use crate::{
-    attr::Attrs,
+    attrs::AttrFlags,
     builtin_type::BuiltinType,
     db::DefDatabase,
     expr_store::ExpressionStoreSourceMap,
@@ -600,17 +600,17 @@ fn module(&self, _db: &dyn DefDatabase) -> ModuleId {
 /// An ID of a module, **local** to a `DefMap`.
 pub type LocalModuleId = Idx<nameres::ModuleData>;
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
 pub struct FieldId {
     // FIXME: Store this as an erased `salsa::Id` to save space
     pub parent: VariantId,
     pub local_id: LocalFieldId,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
 pub struct TupleId(pub u32);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
 pub struct TupleFieldId {
     pub tuple: TupleId,
     pub index: u32,
@@ -956,10 +956,16 @@ pub fn krate(self, db: &dyn DefDatabase) -> Crate {
     }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+// FIXME: We probably should use this in more places.
+/// This exists so we don't have to intern the whole `AttrDefId`: only modules are interned, not every variant.
+#[salsa_macros::interned(debug, no_lifetime)]
+pub struct InternedModuleId {
+    pub loc: ModuleId,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)]
 pub enum AttrDefId {
-    ModuleId(ModuleId),
-    FieldId(FieldId),
+    ModuleId(InternedModuleId),
     AdtId(AdtId),
     FunctionId(FunctionId),
     EnumVariantId(EnumVariantId),
@@ -969,15 +975,12 @@ pub enum AttrDefId {
     TypeAliasId(TypeAliasId),
     MacroId(MacroId),
     ImplId(ImplId),
-    GenericParamId(GenericParamId),
     ExternBlockId(ExternBlockId),
     ExternCrateId(ExternCrateId),
     UseId(UseId),
 }
 
 impl_from!(
-    ModuleId,
-    FieldId,
     AdtId(StructId, EnumId, UnionId),
     EnumVariantId,
     StaticId,
@@ -987,41 +990,11 @@ pub enum AttrDefId {
     TypeAliasId,
     MacroId(Macro2Id, MacroRulesId, ProcMacroId),
     ImplId,
-    GenericParamId,
     ExternCrateId,
     UseId
     for AttrDefId
 );
 
-impl TryFrom<ModuleDefId> for AttrDefId {
-    type Error = ();
-
-    fn try_from(value: ModuleDefId) -> Result<Self, Self::Error> {
-        match value {
-            ModuleDefId::ModuleId(it) => Ok(it.into()),
-            ModuleDefId::FunctionId(it) => Ok(it.into()),
-            ModuleDefId::AdtId(it) => Ok(it.into()),
-            ModuleDefId::EnumVariantId(it) => Ok(it.into()),
-            ModuleDefId::ConstId(it) => Ok(it.into()),
-            ModuleDefId::StaticId(it) => Ok(it.into()),
-            ModuleDefId::TraitId(it) => Ok(it.into()),
-            ModuleDefId::TypeAliasId(it) => Ok(it.into()),
-            ModuleDefId::MacroId(id) => Ok(id.into()),
-            ModuleDefId::BuiltinType(_) => Err(()),
-        }
-    }
-}
-
-impl From<ItemContainerId> for AttrDefId {
-    fn from(acid: ItemContainerId) -> Self {
-        match acid {
-            ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
-            ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
-            ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
-            ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
-        }
-    }
-}
 impl From<AssocItemId> for AttrDefId {
     fn from(assoc: AssocItemId) -> Self {
         match assoc {
@@ -1041,7 +1014,7 @@ fn from(vid: VariantId) -> Self {
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype, salsa::Update)]
 pub enum VariantId {
     EnumVariantId(EnumVariantId),
     StructId(StructId),
@@ -1262,8 +1235,7 @@ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
 impl HasModule for AttrDefId {
     fn module(&self, db: &dyn DefDatabase) -> ModuleId {
         match self {
-            AttrDefId::ModuleId(it) => *it,
-            AttrDefId::FieldId(it) => it.parent.module(db),
+            AttrDefId::ModuleId(it) => it.loc(db),
             AttrDefId::AdtId(it) => it.module(db),
             AttrDefId::FunctionId(it) => it.module(db),
             AttrDefId::EnumVariantId(it) => it.module(db),
@@ -1273,12 +1245,6 @@ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
             AttrDefId::TypeAliasId(it) => it.module(db),
             AttrDefId::ImplId(it) => it.module(db),
             AttrDefId::ExternBlockId(it) => it.module(db),
-            AttrDefId::GenericParamId(it) => match it {
-                GenericParamId::TypeParamId(it) => it.parent(),
-                GenericParamId::ConstParamId(it) => it.parent(),
-                GenericParamId::LifetimeParamId(it) => it.parent,
-            }
-            .module(db),
             AttrDefId::MacroId(it) => it.module(db),
             AttrDefId::ExternCrateId(it) => it.module(db),
             AttrDefId::UseId(it) => it.module(db),
@@ -1402,32 +1368,18 @@ pub enum Complete {
 }
 
 impl Complete {
-    pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
-        let mut do_not_complete = Complete::Yes;
-        for ra_attr in attrs.rust_analyzer_tool() {
-            let segments = ra_attr.path.segments();
-            if segments.len() != 2 {
-                continue;
-            }
-            let action = segments[1].symbol();
-            if *action == sym::completions {
-                match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
-                    Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
-                        if ident.sym == sym::ignore_flyimport {
-                            do_not_complete = Complete::IgnoreFlyimport;
-                        } else if is_trait {
-                            if ident.sym == sym::ignore_methods {
-                                do_not_complete = Complete::IgnoreMethods;
-                            } else if ident.sym == sym::ignore_flyimport_methods {
-                                do_not_complete = Complete::IgnoreFlyimportMethods;
-                            }
-                        }
-                    }
-                    _ => {}
-                }
+    #[inline]
+    pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete {
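+        // The `rust_analyzer::completions(..)` markers are precomputed into `AttrFlags`;
+        // flyimport ignoring applies to any item, the method-related flags only to traits.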
+        if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) {
+            return Complete::IgnoreFlyimport;
+        } else if is_trait {
+            if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) {
+                return Complete::IgnoreMethods;
+            } else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) {
+                return Complete::IgnoreFlyimportMethods;
             }
         }
-        do_not_complete
+        Complete::Yes
     }
 
     #[inline]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index c489c1f..947a54f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -300,21 +300,21 @@ fn match_by_first_token_literally() {
     check(
         r#"
 macro_rules! m {
-    ($i:ident) => ( mod $i {} );
+    ($i:ident) => ( enum $i {} );
     (= $i:ident) => ( fn $i() {} );
     (+ $i:ident) => ( struct $i; )
 }
-m! { foo }
+m! { Foo }
 m! { = bar }
 m! { + Baz }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($i:ident) => ( mod $i {} );
+    ($i:ident) => ( enum $i {} );
     (= $i:ident) => ( fn $i() {} );
     (+ $i:ident) => ( struct $i; )
 }
-mod foo {}
+enum Foo {}
 fn bar() {}
 struct Baz;
 "#]],
@@ -326,21 +326,21 @@ fn match_by_last_token_literally() {
     check(
         r#"
 macro_rules! m {
-    ($i:ident) => ( mod $i {} );
+    ($i:ident) => ( enum $i {} );
     ($i:ident =) => ( fn $i() {} );
     ($i:ident +) => ( struct $i; )
 }
-m! { foo }
+m! { Foo }
 m! { bar = }
 m! { Baz + }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($i:ident) => ( mod $i {} );
+    ($i:ident) => ( enum $i {} );
     ($i:ident =) => ( fn $i() {} );
     ($i:ident +) => ( struct $i; )
 }
-mod foo {}
+enum Foo {}
 fn bar() {}
 struct Baz;
 "#]],
@@ -352,21 +352,21 @@ fn match_by_ident() {
     check(
         r#"
 macro_rules! m {
-    ($i:ident) => ( mod $i {} );
+    ($i:ident) => ( enum $i {} );
     (spam $i:ident) => ( fn $i() {} );
     (eggs $i:ident) => ( struct $i; )
 }
-m! { foo }
+m! { Foo }
 m! { spam bar }
 m! { eggs Baz }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($i:ident) => ( mod $i {} );
+    ($i:ident) => ( enum $i {} );
     (spam $i:ident) => ( fn $i() {} );
     (eggs $i:ident) => ( struct $i; )
 }
-mod foo {}
+enum Foo {}
 fn bar() {}
 struct Baz;
 "#]],
@@ -378,12 +378,12 @@ fn match_by_separator_token() {
     check(
         r#"
 macro_rules! m {
-    ($($i:ident),*) => ($(mod $i {} )*);
+    ($($i:ident),*) => ($(enum $i {} )*);
     ($($i:ident)#*) => ($(fn $i() {} )*);
     ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
 }
 
-m! { foo, bar }
+m! { Baz, Qux }
 
 m! { foo# bar }
 
@@ -391,13 +391,13 @@ macro_rules! m {
 "#,
         expect![[r#"
 macro_rules! m {
-    ($($i:ident),*) => ($(mod $i {} )*);
+    ($($i:ident),*) => ($(enum $i {} )*);
     ($($i:ident)#*) => ($(fn $i() {} )*);
     ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
 }
 
-mod foo {}
-mod bar {}
+enum Baz {}
+enum Qux {}
 
 fn foo() {}
 fn bar() {}
@@ -1114,11 +1114,11 @@ fn test_single_item() {
     check(
         r#"
 macro_rules! m { ($i:item) => ( $i ) }
-m! { mod c {} }
+m! { struct C {} }
 "#,
         expect![[r#"
 macro_rules! m { ($i:item) => ( $i ) }
-mod c {}
+struct C {}
 "#]],
     )
 }
@@ -1144,6 +1144,7 @@ fn h() {}
     type T = u8;
 }
 "#,
+        // The modules appear twice: once for the module declaration itself and once for the macro call.
         expect![[r#"
 macro_rules! m { ($($i:item)*) => ($($i )*) }
 extern crate a;
@@ -1161,7 +1162,9 @@ trait J {}
 fn h() {}
 extern {}
 type T = u8;
-"#]],
+
+mod b;
+mod c {}"#]],
     );
 }
 
@@ -1959,28 +1962,6 @@ fn f() {
 }
 
 #[test]
-fn test_edition_handling_in() {
-    check(
-        r#"
-//- /main.rs crate:main deps:old edition:2021
-fn f() {
-    old::parse_try_old!(try!{});
-}
-//- /old.rs crate:old edition:2015
-#[macro_export]
-macro_rules! parse_try_old {
-    ($it:expr) => {};
-}
- "#,
-        expect![[r#"
-fn f() {
-    ;
-}
-"#]],
-    );
-}
-
-#[test]
 fn semicolon_does_not_glue() {
     check(
         r#"
@@ -2051,3 +2032,33 @@ fn f() {
     "#]],
     );
 }
+
+#[test]
+fn per_token_edition() {
+    check(
+        r#"
+//- /foo.rs crate:foo edition:2024
+#[macro_export]
+macro_rules! m {
+    ($e:expr) => {};
+}
+//- /bar.rs crate:bar deps:foo edition:2021
+fn gen() -> usize {
+    0
+}
+
+fn foo() {
+    foo::m!(gen());
+}
+    "#,
+        expect![[r#"
+fn gen() -> usize {
+    0
+}
+
+fn foo() {
+    ;
+}
+    "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index e8ae499..98b3115 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -245,6 +245,21 @@ fn resolve(
         }
     }
 
+    for (_, module) in def_map.modules() {
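+        // Render module declarations that were produced by macro expansion as well.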
+        let Some(src) = module.declaration_source(&db) else {
+            continue;
+        };
+        if let Some(macro_file) = src.file_id.macro_file() {
+            let pp = pretty_print_macro_expansion(
+                src.value.syntax().clone(),
+                db.span_map(macro_file.into()).as_ref(),
+                false,
+                false,
+            );
+            format_to!(expanded_text, "\n{}", pp)
+        }
+    }
+
     for impl_id in def_map[local_id].scope.impls() {
         let src = impl_id.lookup(&db).source(&db);
         if let Some(macro_file) = src.file_id.macro_file()
@@ -372,7 +387,6 @@ fn expand(
             subtree,
             syntax_bridge::TopEntryPoint::MacroItems,
             &mut |_| span::Edition::CURRENT,
-            span::Edition::CURRENT,
         );
         if parse.errors().is_empty() {
             Ok(subtree.clone())
@@ -413,10 +427,7 @@ fn regression_20171() {
         #dollar_crate::panic::panic_2021!();
     }}
         };
-    token_tree_to_syntax_node(
-        &tt,
-        syntax_bridge::TopEntryPoint::MacroStmts,
-        &mut |_| Edition::CURRENT,
-        Edition::CURRENT,
-    );
+    token_tree_to_syntax_node(&tt, syntax_bridge::TopEntryPoint::MacroStmts, &mut |_| {
+        Edition::CURRENT
+    });
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 6952a9d..5216246 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -9,37 +9,65 @@
 
 #[test]
 fn attribute_macro_attr_censoring() {
-    cov_mark::check!(attribute_macro_attr_censoring);
     check(
         r#"
 //- proc_macros: identity
-#[attr1] #[proc_macros::identity] #[attr2]
-struct S;
-"#,
-        expect![[r#"
-#[attr1] #[proc_macros::identity] #[attr2]
+//- minicore: derive
+#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
 struct S;
 
+/// Foo
+#[cfg_attr(false, doc = "abc...", attr1)]
+mod foo {
+    #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
+    #![cfg_attr(true, doc = "123...", attr2)]
+    #![attr3]
+
+    #[cfg_attr(true, cfg(false))]
+    fn foo() {}
+
+    #[cfg(true)]
+    fn bar() {}
+}
+"#,
+        expect![[r##"
+#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
+struct S;
+
+/// Foo
+#[cfg_attr(false, doc = "abc...", attr1)]
+mod foo {
+    #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
+    #![cfg_attr(true, doc = "123...", attr2)]
+    #![attr3]
+
+    #[cfg_attr(true, cfg(false))]
+    fn foo() {}
+
+    #[cfg(true)]
+    fn bar() {}
+}
+
 #[attr1]
-#[attr2] struct S;"#]],
+#[attr2] struct S;
+#[doc = " Foo"] mod foo {
+    # ![foo]
+    # ![doc = "123..."]
+    # ![attr2]
+    # ![attr3]
+    #[cfg_attr(true , cfg(false ))] fn foo() {}
+    #[cfg(true )] fn bar() {}
+}"##]],
     );
 }
 
 #[test]
 fn derive_censoring() {
-    cov_mark::check!(derive_censoring);
     check(
         r#"
 //- proc_macros: derive_identity
 //- minicore:derive
-#[attr1]
-#[derive(Foo)]
-#[derive(proc_macros::DeriveIdentity)]
-#[derive(Bar)]
-#[attr2]
-struct S;
-"#,
-        expect![[r#"
+use derive as my_cool_derive;
 #[attr1]
 #[derive(Foo)]
 #[derive(proc_macros::DeriveIdentity)]
@@ -47,6 +75,60 @@ fn derive_censoring() {
 #[attr2]
 struct S;
 
+#[my_cool_derive()]
+#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
+#[my_cool_derive()]
+struct Foo {
+    #[cfg_attr(false, cfg(false), attr2)]
+    v1: i32,
+    #[cfg_attr(true, cfg(false), attr2)]
+    v1: i32,
+    #[cfg_attr(true, attr3)]
+    v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
+    v3: Foo<{
+        #[cfg(false)]
+        let foo = 123;
+        456
+    }>,
+    #[cfg(false)]
+    v4: bool // No comma here
+}
+"#,
+        expect![[r#"
+use derive as my_cool_derive;
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+
+#[my_cool_derive()]
+#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
+#[my_cool_derive()]
+struct Foo {
+    #[cfg_attr(false, cfg(false), attr2)]
+    v1: i32,
+    #[cfg_attr(true, cfg(false), attr2)]
+    v1: i32,
+    #[cfg_attr(true, attr3)]
+    v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
+    v3: Foo<{
+        #[cfg(false)]
+        let foo = 123;
+        456
+    }>,
+    #[cfg(false)]
+    v4: bool // No comma here
+}
+
+#[attr1]
+#[my_cool_derive()] struct Foo {
+    v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< {
+        456
+    }
+    >,
+}
 #[attr1]
 #[derive(Bar)]
 #[attr2] struct S;"#]],
@@ -87,7 +169,7 @@ fn attribute_macro_syntax_completion_2() {
 fn foo() { bar.; blub }
 
 fn foo() {
-    bar. ;
+    bar.;
     blub
 }"#]],
     );
@@ -234,3 +316,28 @@ fn cfg_evaluated_before_attr_macros() {
         expect![[r#""#]],
     );
 }
+
+#[test]
+fn derive_helpers_are_ignored() {
+    check(
+        r#"
+//- proc_macros: identity, helper_should_be_ignored, helper_should_be_ignored_derive
+//- minicore: derive
+use proc_macros::{identity, helper_should_be_ignored, HelperShouldBeIgnoredDerive};
+
+#[derive(HelperShouldBeIgnoredDerive)]
+#[helper_should_be_ignored]
+#[identity]
+struct Foo;
+"#,
+        expect![[r#"
+use proc_macros::{identity, helper_should_be_ignored, HelperShouldBeIgnoredDerive};
+
+#[derive(HelperShouldBeIgnoredDerive)]
+#[helper_should_be_ignored]
+#[identity]
+struct Foo;
+
+#[helper_should_be_ignored] struct Foo;"#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index f910008..5f45e18 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -391,19 +391,14 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM
     )
     .entered();
 
-    let module_data = ModuleData::new(
-        ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
-        Visibility::Public,
-    );
+    let root_file_id = crate_id.root_file_id(db);
+    let module_data =
+        ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public);
 
     let def_map =
         DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None);
-    let (def_map, local_def_map) = collector::collect_defs(
-        db,
-        def_map,
-        TreeId::new(krate.root_file_id(db).into(), None),
-        None,
-    );
+    let (def_map, local_def_map) =
+        collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None);
 
     DefMapPair::new(db, def_map, local_def_map)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
index 8d2a386..b678533 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
@@ -4,7 +4,8 @@
 
 use cfg::CfgOptions;
 use hir_expand::{
-    AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind,
+    AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind,
+    MacroDefKind,
     mod_path::ModPath,
     name::{AsName, Name},
     span_map::SpanMap,
@@ -21,8 +22,8 @@
 use crate::{
     AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
     ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
-    attr::Attrs,
     db::DefDatabase,
+    item_tree::AttrsOrCfg,
     macro_call_as_call_id,
     nameres::{
         DefMap, LocalDefMap, MacroSubNs,
@@ -191,19 +192,22 @@ fn collect(
 
     fn collect_item(&mut self, item: ast::AssocItem) {
         let ast_id = self.ast_id_map.ast_id(&item);
-        let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options);
-        if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) {
-            self.diagnostics.push(DefDiagnostic::unconfigured_code(
-                self.module_id.local_id,
-                InFile::new(self.file_id, ast_id.erase()),
-                cfg,
-                self.cfg_options.clone(),
-            ));
-            return;
-        }
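+        // Items behind a disabled `cfg` get an "unconfigured code" diagnostic and are skipped
+        // instead of being collected.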
+        let attrs =
+            match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) {
+                AttrsOrCfg::Enabled { attrs } => attrs,
+                AttrsOrCfg::CfgDisabled(cfg) => {
+                    self.diagnostics.push(DefDiagnostic::unconfigured_code(
+                        self.module_id.local_id,
+                        InFile::new(self.file_id, ast_id.erase()),
+                        cfg.0,
+                        self.cfg_options.clone(),
+                    ));
+                    return;
+                }
+            };
         let ast_id = InFile::new(self.file_id, ast_id.upcast());
 
-        'attrs: for attr in &*attrs {
+        'attrs: for (attr_id, attr) in attrs.as_ref().iter() {
             let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
 
             match self.def_map.resolve_attr_macro(
@@ -212,6 +216,7 @@ fn collect_item(&mut self, item: ast::AssocItem) {
                 self.module_id.local_id,
                 ast_id_with_path,
                 attr,
+                attr_id,
             ) {
                 Ok(ResolvedAttr::Macro(call_id)) => {
                     let loc = self.db.lookup_intern_macro_call(call_id);
@@ -240,8 +245,12 @@ fn collect_item(&mut self, item: ast::AssocItem) {
                 Err(_) => {
                     self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                         self.module_id.local_id,
-                        MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id },
-                        attr.path().clone(),
+                        MacroCallKind::Attr {
+                            ast_id,
+                            attr_args: None,
+                            censored_attr_ids: AttrMacroAttrIds::from_one(attr_id),
+                        },
+                        (*attr.path).clone(),
                     ));
                 }
             }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
index 2f56d60..fb75502 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
@@ -2,7 +2,7 @@
 
 use base_db::Crate;
 use hir_expand::{
-    MacroCallId, MacroCallKind, MacroDefId,
+    AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId,
     attrs::{Attr, AttrId, AttrInput},
     inert_attr_macro::find_builtin_attr_idx,
     mod_path::{ModPath, PathKind},
@@ -28,6 +28,7 @@ pub enum ResolvedAttr {
 }
 
 impl DefMap {
+    /// This cannot be used to resolve items that allow derives.
     pub(crate) fn resolve_attr_macro(
         &self,
         local_def_map: &LocalDefMap,
@@ -35,6 +36,7 @@ pub(crate) fn resolve_attr_macro(
         original_module: LocalModuleId,
         ast_id: AstIdWithPath<ast::Item>,
         attr: &Attr,
+        attr_id: AttrId,
     ) -> Result<ResolvedAttr, UnresolvedMacro> {
         // NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
 
@@ -68,6 +70,9 @@ pub(crate) fn resolve_attr_macro(
             db,
             &ast_id,
             attr,
+            // There aren't any active attributes before this one, because attribute macros
+            // replace their input, and derive macros are not allowed in this function.
+            AttrMacroAttrIds::from_one(attr_id),
             self.krate,
             db.macro_def(def),
         )))
@@ -102,6 +107,7 @@ pub(super) fn attr_macro_as_call_id(
     db: &dyn DefDatabase,
     item_attr: &AstIdWithPath<ast::Item>,
     macro_attr: &Attr,
+    censored_attr_ids: AttrMacroAttrIds,
     krate: Crate,
     def: MacroDefId,
 ) -> MacroCallId {
@@ -121,7 +127,7 @@ pub(super) fn attr_macro_as_call_id(
         MacroCallKind::Attr {
             ast_id: item_attr.ast_id,
             attr_args: arg.map(Arc::new),
-            invoc_attr_index: macro_attr.id,
+            censored_attr_ids,
         },
         macro_attr.ctxt,
     )
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index a030ed1..9aa7feb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -3,14 +3,14 @@
 //! `DefCollector::collect` contains the fixed-point iteration loop which
 //! resolves imports and expands macros.
 
-use std::{cmp::Ordering, iter, mem, ops::Not};
+use std::{cmp::Ordering, iter, mem};
 
 use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
 use cfg::{CfgAtom, CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
-    EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
-    MacroDefId, MacroDefKind,
+    AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId,
+    MacroCallKind, MacroDefId, MacroDefKind,
     attrs::{Attr, AttrId},
     builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
     mod_path::{ModPath, PathKind},
@@ -18,9 +18,10 @@
     proc_macro::CustomProcMacroExpander,
 };
 use intern::{Interned, sym};
-use itertools::{Itertools, izip};
+use itertools::izip;
 use la_arena::Idx;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::SmallVec;
 use span::{Edition, FileAstId, SyntaxContext};
 use syntax::ast;
 use triomphe::Arc;
@@ -32,12 +33,11 @@
     MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
     ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId,
     UseLoc,
-    attr::Attrs,
     db::DefDatabase,
     item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
     item_tree::{
-        self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall,
-        MacroRules, Mod, ModItemId, ModKind, TreeId,
+        self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId,
+        Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId,
     },
     macro_call_as_call_id,
     nameres::{
@@ -102,6 +102,7 @@ pub(super) fn collect_defs(
         proc_macros,
         from_glob_import: Default::default(),
         skip_attrs: Default::default(),
+        prev_active_attrs: Default::default(),
         unresolved_extern_crates: Default::default(),
         is_proc_macro: krate.is_proc_macro,
     };
@@ -206,6 +207,7 @@ enum MacroDirectiveKind<'db> {
     },
     Attr {
         ast_id: AstIdWithPath<ast::Item>,
+        attr_id: AttrId,
         attr: Attr,
         mod_item: ModItemId,
         /* is this needed? */ tree: TreeId,
@@ -246,28 +248,27 @@ struct DefCollector<'db> {
     /// This also stores the attributes to skip when we resolve derive helpers and non-macro
     /// non-builtin attributes in general.
     // FIXME: There has to be a better way to do this
-    skip_attrs: FxHashMap<InFile<FileAstId<ast::Item>>, AttrId>,
+    skip_attrs: FxHashMap<AstId<ast::Item>, AttrId>,
+    /// When we expand attributes, we need to censor all previous active attributes
+    /// on the same item. Therefore, this holds all active attributes that we already
+    /// expanded.
+    prev_active_attrs: FxHashMap<AstId<ast::Item>, SmallVec<[AttrId; 1]>>,
 }
 
 impl<'db> DefCollector<'db> {
     fn seed_with_top_level(&mut self) {
         let _p = tracing::info_span!("seed_with_top_level").entered();
 
-        let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
+        let file_id = self.def_map.krate.root_file_id(self.db);
         let item_tree = self.db.file_item_tree(file_id.into());
-        let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
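+        // Crate-level attributes are read even when the crate root is cfg-disabled;
+        // only the actual item collection is skipped further below.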
+        let attrs = match item_tree.top_level_attrs() {
+            AttrsOrCfg::Enabled { attrs } => attrs.as_ref(),
+            AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(),
+        };
         let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
 
-        let mut process = true;
-
         // Process other crate-level attributes.
         for attr in &*attrs {
-            if let Some(cfg) = attr.cfg()
-                && self.cfg_options.check(&cfg) == Some(false)
-            {
-                process = false;
-                break;
-            }
             let Some(attr_name) = attr.path.as_ident() else { continue };
 
             match () {
@@ -291,7 +292,7 @@ fn seed_with_top_level(&mut self) {
                 () if *attr_name == sym::feature => {
                     let features =
                         attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
-                            |(feat, _)| match feat.segments() {
+                            |(feat, _, _)| match feat.segments() {
                                 [name] => Some(name.symbol().clone()),
                                 _ => None,
                             },
@@ -344,7 +345,7 @@ fn seed_with_top_level(&mut self) {
 
         self.inject_prelude();
 
-        if !process {
+        if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) {
             return;
         }
 
@@ -362,10 +363,7 @@ fn seed_with_top_level(&mut self) {
 
     fn seed_with_inner(&mut self, tree_id: TreeId) {
         let item_tree = tree_id.item_tree(self.db);
-        let is_cfg_enabled = item_tree
-            .top_level_attrs(self.db, self.def_map.krate)
-            .cfg()
-            .is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false));
+        let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. });
         if is_cfg_enabled {
             self.inject_prelude();
 
@@ -456,18 +454,18 @@ fn reseed_with_unresolved_attribute(&mut self) -> ReachedFixedPoint {
             self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
                 .kind
             {
-                MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => {
+                MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => {
                     self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                         directive.module_id,
                         MacroCallKind::Attr {
                             ast_id: ast_id.ast_id,
                             attr_args: None,
-                            invoc_attr_index: attr.id,
+                            censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id),
                         },
-                        attr.path().clone(),
+                        (*attr.path).clone(),
                     ));
 
-                    self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id);
+                    self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id);
 
                     Some((idx, directive, *mod_item, *tree, *item_tree))
                 }
@@ -1240,7 +1238,17 @@ fn resolve_macros(&mut self) -> ReachedFixedPoint {
         let mut macros = mem::take(&mut self.unresolved_macros);
         let mut resolved = Vec::new();
         let mut push_resolved = |directive: &MacroDirective<'_>, call_id| {
-            resolved.push((directive.module_id, directive.depth, directive.container, call_id));
+            let attr_macro_item = match &directive.kind {
+                MacroDirectiveKind::Attr { ast_id, .. } => Some(ast_id.ast_id),
+                MacroDirectiveKind::FnLike { .. } | MacroDirectiveKind::Derive { .. } => None,
+            };
+            resolved.push((
+                directive.module_id,
+                directive.depth,
+                directive.container,
+                call_id,
+                attr_macro_item,
+            ));
         };
 
         #[derive(PartialEq, Eq)]
@@ -1350,6 +1358,7 @@ enum Resolved {
                 MacroDirectiveKind::Attr {
                     ast_id: file_ast_id,
                     mod_item,
+                    attr_id,
                     attr,
                     tree,
                     item_tree,
@@ -1362,7 +1371,7 @@ enum Resolved {
                         let mod_dir = collector.mod_dirs[&directive.module_id].clone();
                         collector
                             .skip_attrs
-                            .insert(InFile::new(file_id, mod_item.ast_id()), attr.id);
+                            .insert(InFile::new(file_id, mod_item.ast_id()), *attr_id);
 
                         ModCollector {
                             def_collector: collector,
@@ -1398,7 +1407,6 @@ enum Resolved {
                     // being cfg'ed out).
                     // Ideally we will just expand them to nothing here. But we are only collecting macro calls,
                     // not expanding them, so we have no way to do that.
-                    // If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`.
                     if matches!(
                         def.kind,
                         MacroDefKind::BuiltInAttr(_, expander)
@@ -1410,8 +1418,18 @@ enum Resolved {
                         }
                     }
 
-                    let call_id = || {
-                        attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
+                    let mut call_id = || {
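+                        // Record this attribute as expanded so that later attribute macros
+                        // on the same item censor it as well.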
+                        let active_attrs = self.prev_active_attrs.entry(ast_id).or_default();
+                        active_attrs.push(*attr_id);
+
+                        attr_macro_as_call_id(
+                            self.db,
+                            file_ast_id,
+                            attr,
+                            AttrMacroAttrIds::from_many(active_attrs),
+                            self.def_map.krate,
+                            def,
+                        )
                     };
                     if matches!(def,
                         MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
@@ -1429,7 +1447,7 @@ enum Resolved {
                                 let diag = DefDiagnostic::invalid_derive_target(
                                     directive.module_id,
                                     ast_id,
-                                    attr.id,
+                                    *attr_id,
                                 );
                                 self.def_map.diagnostics.push(diag);
                                 return recollect_without(self);
@@ -1442,7 +1460,7 @@ enum Resolved {
                             Some(derive_macros) => {
                                 let call_id = call_id();
                                 let mut len = 0;
-                                for (idx, (path, call_site)) in derive_macros.enumerate() {
+                                for (idx, (path, call_site, _)) in derive_macros.enumerate() {
                                     let ast_id = AstIdWithPath::new(
                                         file_id,
                                         ast_id.value,
@@ -1453,7 +1471,7 @@ enum Resolved {
                                         depth: directive.depth + 1,
                                         kind: MacroDirectiveKind::Derive {
                                             ast_id,
-                                            derive_attr: attr.id,
+                                            derive_attr: *attr_id,
                                             derive_pos: idx,
                                             ctxt: call_site.ctx,
                                             derive_macro_id: call_id,
@@ -1469,13 +1487,13 @@ enum Resolved {
                                 // Check the comment in [`builtin_attr_macro`].
                                 self.def_map.modules[directive.module_id]
                                     .scope
-                                    .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
+                                    .init_derive_attribute(ast_id, *attr_id, call_id, len + 1);
                             }
                             None => {
                                 let diag = DefDiagnostic::malformed_derive(
                                     directive.module_id,
                                     ast_id,
-                                    attr.id,
+                                    *attr_id,
                                 );
                                 self.def_map.diagnostics.push(diag);
                             }
@@ -1522,8 +1540,14 @@ enum Resolved {
             self.def_map.modules[module_id].scope.add_macro_invoc(ptr.map(|(_, it)| it), call_id);
         }
 
-        for (module_id, depth, container, macro_call_id) in resolved {
-            self.collect_macro_expansion(module_id, macro_call_id, depth, container);
+        for (module_id, depth, container, macro_call_id, attr_macro_item) in resolved {
+            self.collect_macro_expansion(
+                module_id,
+                macro_call_id,
+                depth,
+                container,
+                attr_macro_item,
+            );
         }
 
         res
@@ -1535,6 +1559,7 @@ fn collect_macro_expansion(
         macro_call_id: MacroCallId,
         depth: usize,
         container: ItemContainerId,
+        attr_macro_item: Option<AstId<ast::Item>>,
     ) {
         if depth > self.def_map.recursion_limit() as usize {
             cov_mark::hit!(macro_expansion_overflow);
@@ -1545,6 +1570,34 @@ fn collect_macro_expansion(
 
         let item_tree = self.db.file_item_tree(file_id);
 
+        // Derive helpers that are in scope for an item are also in scope for attribute macro expansions
+        // of that item (but not derive or fn-like macros).
+        // FIXME: This is a hack. The proper way to do this is to have a chain of derive helper scopes,
+        // where the next scope in the chain is the parent hygiene context of the span. Unfortunately,
+        // that is difficult to implement with our current name resolution and hygiene system.
+        // This hack is also incorrect since it ignores items in blocks. But the main reason to bring derive
+        // helpers into scope in this case is to help with:
+        // ```
+        // #[derive(DeriveWithHelper)]
+        // #[helper]
+        // #[attr_macro]
+        // struct Foo;
+        // ```
+        // Here `attr_macro`'s input will include `#[helper]` but not the derive, so it will likely
+        // include `#[helper]` in its output as well. Not supporting blocks is therefore hopefully fine, at least for now.
+        if let Some(attr_macro_item) = attr_macro_item
+            && let Some(derive_helpers) = self.def_map.derive_helpers_in_scope.get(&attr_macro_item)
+        {
+            let derive_helpers = derive_helpers.clone();
+            for item in item_tree.top_level_items() {
+                self.def_map
+                    .derive_helpers_in_scope
+                    .entry(InFile::new(file_id, item.ast_id()))
+                    .or_default()
+                    .extend(derive_helpers.iter().cloned());
+            }
+        }
+
         let mod_dir = if macro_call_id.is_include_macro(self.db) {
             ModDir::root()
         } else {
@@ -1712,16 +1765,17 @@ fn collect(&mut self, items: &[ModItemId], container: ItemContainerId) {
         };
 
         let mut process_mod_item = |item: ModItemId| {
-            let attrs = self.item_tree.attrs(db, krate, item.ast_id());
-            if let Some(cfg) = attrs.cfg()
-                && !self.is_cfg_enabled(&cfg)
-            {
-                let ast_id = item.ast_id().erase();
-                self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
-                return;
-            }
+            let attrs = match self.item_tree.attrs(item.ast_id()) {
+                Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
+                None => Attrs::EMPTY,
+                Some(AttrsOrCfg::CfgDisabled(cfg)) => {
+                    let ast_id = item.ast_id().erase();
+                    self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0);
+                    return;
+                }
+            };
 
-            if let Err(()) = self.resolve_attributes(&attrs, item, container) {
+            if let Err(()) = self.resolve_attributes(attrs, item, container) {
                 // Do not process the item. It has at least one non-builtin attribute, so the
                 // fixed-point algorithm is required to resolve the rest of them.
                 return;
@@ -1733,7 +1787,7 @@ fn collect(&mut self, items: &[ModItemId], container: ItemContainerId) {
                 self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
 
             match item {
-                ModItemId::Mod(m) => self.collect_module(m, &attrs),
+                ModItemId::Mod(m) => self.collect_module(m, attrs),
                 ModItemId::Use(item_tree_id) => {
                     let id =
                         UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) }
@@ -2006,7 +2060,7 @@ fn process_macro_use_extern_crate<'a>(
                 );
                 return;
             };
-            for (path, _) in paths {
+            for (path, _, _) in paths {
                 if let Some(name) = path.as_ident() {
                     single_imports.push(name.clone());
                 }
@@ -2020,7 +2074,7 @@ fn process_macro_use_extern_crate<'a>(
         );
     }
 
-    fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
+    fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: Attrs<'_>) {
         let path_attr = attrs.by_key(sym::path).string_value_unescape();
         let is_macro_use = attrs.by_key(sym::macro_use).exists();
         let module = &self.item_tree[module_ast_id];
@@ -2061,23 +2115,18 @@ fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
                     self.file_id(),
                     &module.name,
                     path_attr.as_deref(),
+                    self.def_collector.def_map.krate,
                 ) {
                     Ok((file_id, is_mod_rs, mod_dir)) => {
                         let item_tree = db.file_item_tree(file_id.into());
-                        let krate = self.def_collector.def_map.krate;
-                        let is_enabled = item_tree
-                            .top_level_attrs(db, krate)
-                            .cfg()
-                            .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg))
-                            .map_or(Ok(()), Err);
-                        match is_enabled {
-                            Err(cfg) => {
+                        match item_tree.top_level_attrs() {
+                            AttrsOrCfg::CfgDisabled(cfg) => {
                                 self.emit_unconfigured_diagnostic(
                                     InFile::new(self.file_id(), module_ast_id.erase()),
-                                    &cfg,
+                                    &cfg.0,
                                 );
                             }
-                            Ok(()) => {
+                            AttrsOrCfg::Enabled { attrs } => {
                                 let module_id = self.push_child_module(
                                     module.name.clone(),
                                     ast_id.value,
@@ -2093,11 +2142,8 @@ fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
                                     mod_dir,
                                 }
                                 .collect_in_top_module(item_tree.top_level_items());
-                                let is_macro_use = is_macro_use
-                                    || item_tree
-                                        .top_level_attrs(db, krate)
-                                        .by_key(sym::macro_use)
-                                        .exists();
+                                let is_macro_use =
+                                    is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists();
                                 if is_macro_use {
                                     self.import_all_legacy_macros(module_id);
                                 }
@@ -2185,36 +2231,16 @@ fn push_child_module(
     /// assumed to be resolved already.
     fn resolve_attributes(
         &mut self,
-        attrs: &Attrs,
+        attrs: Attrs<'_>,
         mod_item: ModItemId,
         container: ItemContainerId,
     ) -> Result<(), ()> {
-        let mut ignore_up_to = self
+        let ignore_up_to = self
             .def_collector
             .skip_attrs
             .get(&InFile::new(self.file_id(), mod_item.ast_id()))
             .copied();
-        let iter = attrs
-            .iter()
-            .dedup_by(|a, b| {
-                // FIXME: this should not be required, all attributes on an item should have a
-                // unique ID!
-                // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
-                //     #[cfg_attr(not(off), unresolved, unresolved)]
-                //     struct S;
-                // We should come up with a different way to ID attributes.
-                a.id == b.id
-            })
-            .skip_while(|attr| match ignore_up_to {
-                Some(id) if attr.id == id => {
-                    ignore_up_to = None;
-                    true
-                }
-                Some(_) => true,
-                None => false,
-            });
-
-        for attr in iter {
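+        // Skip attributes up to and including the already-expanded one recorded in `skip_attrs`.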
+        for (attr_id, attr) in attrs.iter_after(ignore_up_to) {
             if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
                 continue;
             }
@@ -2229,6 +2255,7 @@ fn resolve_attributes(
                 depth: self.macro_depth + 1,
                 kind: MacroDirectiveKind::Attr {
                     ast_id,
+                    attr_id,
                     attr: attr.clone(),
                     mod_item,
                     tree: self.tree_id,
@@ -2246,7 +2273,13 @@ fn resolve_attributes(
     fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId<MacroRules>, module: ModuleId) {
         let krate = self.def_collector.def_map.krate;
         let mac = &self.item_tree[ast_id];
-        let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
+        let attrs = match self.item_tree.attrs(ast_id.upcast()) {
+            Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
+            None => Attrs::EMPTY,
+            Some(AttrsOrCfg::CfgDisabled(_)) => {
+                unreachable!("we only get here if the macro is not cfg'ed out")
+            }
+        };
         let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
 
         let export_attr = || attrs.by_key(sym::macro_export);
@@ -2331,7 +2364,13 @@ fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId<MacroRules>, module: Mod
     fn collect_macro_def(&mut self, ast_id: ItemTreeAstId<Macro2>, module: ModuleId) {
         let krate = self.def_collector.def_map.krate;
         let mac = &self.item_tree[ast_id];
-        let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
+        let attrs = match self.item_tree.attrs(ast_id.upcast()) {
+            Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
+            None => Attrs::EMPTY,
+            Some(AttrsOrCfg::CfgDisabled(_)) => {
+                unreachable!("we only get here if the macro is not cfg'ed out")
+            }
+        };
         let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
 
         // Case 1: builtin macros
@@ -2460,6 +2499,7 @@ fn collect_macro_call(
                         call_id,
                         self.macro_depth + 1,
                         container,
+                        None,
                     );
                 }
 
@@ -2515,10 +2555,6 @@ fn borrow_modules(
         Some((a, b))
     }
 
-    fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
-        self.def_collector.cfg_options.check(cfg) != Some(false)
-    }
-
     fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) {
         self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
             self.module_id,
@@ -2558,6 +2594,7 @@ fn do_collect_defs(db: &dyn DefDatabase, def_map: DefMap) -> DefMap {
             proc_macros: Default::default(),
             from_glob_import: Default::default(),
             skip_attrs: Default::default(),
+            prev_active_attrs: Default::default(),
             is_proc_macro: false,
             unresolved_extern_crates: Default::default(),
         };
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
index c495a07..6a07c56 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -17,8 +17,8 @@ pub enum DefDiagnosticKind {
     UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
     UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
     UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
-    InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
-    MalformedDerive { ast: AstId<ast::Adt>, id: usize },
+    InvalidDeriveTarget { ast: AstId<ast::Item>, id: AttrId },
+    MalformedDerive { ast: AstId<ast::Adt>, id: AttrId },
     MacroDefError { ast: AstId<ast::Macro>, message: String },
     MacroError { ast: AstId<ast::Item>, path: ModPath, err: ExpandErrorKind },
 }
@@ -119,10 +119,7 @@ pub(super) fn invalid_derive_target(
         ast: AstId<ast::Item>,
         id: AttrId,
     ) -> Self {
-        Self {
-            in_module: container,
-            kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() },
-        }
+        Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id } }
     }
 
     pub(super) fn malformed_derive(
@@ -130,9 +127,6 @@ pub(super) fn malformed_derive(
         ast: AstId<ast::Adt>,
         id: AttrId,
     ) -> Self {
-        Self {
-            in_module: container,
-            kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() },
-        }
+        Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 0c50f13..140b77a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,6 +1,6 @@
 //! This module resolves `mod foo;` declaration to file.
 use arrayvec::ArrayVec;
-use base_db::AnchoredPath;
+use base_db::{AnchoredPath, Crate};
 use hir_expand::{EditionedFileId, name::Name};
 
 use crate::{HirFileId, db::DefDatabase};
@@ -62,6 +62,7 @@ pub(super) fn resolve_declaration(
         file_id: HirFileId,
         name: &Name,
         attr_path: Option<&str>,
+        krate: Crate,
     ) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
         let name = name.as_str();
 
@@ -91,7 +92,7 @@ pub(super) fn resolve_declaration(
                 if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
                     return Ok((
                         // FIXME: Edition, is this right?
-                        EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
+                        EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate),
                         is_mod_rs,
                         mod_dir,
                     ));
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
index cd88821..cd45afe 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
@@ -3,8 +3,10 @@
 use hir_expand::name::{AsName, Name};
 use intern::sym;
 
-use crate::attr::Attrs;
-use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement};
+use crate::{
+    item_tree::Attrs,
+    tt::{Leaf, TokenTree, TopSubtree, TtElement},
+};
 
 #[derive(Debug, PartialEq, Eq)]
 pub struct ProcMacroDef {
@@ -29,8 +31,8 @@ pub(super) fn to_basedb_kind(&self) -> hir_expand::proc_macro::ProcMacroKind {
     }
 }
 
-impl Attrs {
-    pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
+impl Attrs<'_> {
+    pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
         if self.is_proc_macro() {
             Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang })
         } else if self.is_proc_macro_attribute() {
@@ -51,15 +53,10 @@ pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
         }
     }
 
-    pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
+    pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
         let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?;
         parse_macro_name_and_helper_attrs(derive)
     }
-
-    pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> {
-        let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?;
-        parse_macro_name_and_helper_attrs(derive)
-    }
 }
 
 // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
@@ -84,14 +81,11 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name
             let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
             let helpers = helpers
                 .iter()
-                .filter(
-                    |tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','),
-                )
-                .map(|tt| match tt {
+                .filter_map(|tt| match tt {
                     TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
                     _ => None,
                 })
-                .collect::<Option<Box<[_]>>>()?;
+                .collect::<Box<[_]>>();
 
             Some((trait_name.as_name(), helpers))
         }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index b5afbf3..ccea043 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -186,15 +186,15 @@ pub fn resolve_path_in_type_ns_with_prefix_info(
             Path::Normal(it) => &it.mod_path,
             Path::LangItem(l, seg) => {
                 let type_ns = match *l {
-                    LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
-                    LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
-                    LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
-                    LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
+                    LangItemTarget::UnionId(it) => TypeNs::AdtId(it.into()),
+                    LangItemTarget::TypeAliasId(it) => TypeNs::TypeAliasId(it),
+                    LangItemTarget::StructId(it) => TypeNs::AdtId(it.into()),
+                    LangItemTarget::EnumVariantId(it) => TypeNs::EnumVariantId(it),
                     LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
-                    LangItemTarget::Trait(it) => TypeNs::TraitId(it),
-                    LangItemTarget::Function(_)
-                    | LangItemTarget::ImplDef(_)
-                    | LangItemTarget::Static(_) => return None,
+                    LangItemTarget::TraitId(it) => TypeNs::TraitId(it),
+                    LangItemTarget::FunctionId(_)
+                    | LangItemTarget::ImplId(_)
+                    | LangItemTarget::StaticId(_) => return None,
                 };
                 return Some((
                     type_ns,
@@ -334,14 +334,14 @@ pub fn resolve_path_in_value_ns_with_prefix_info(
                 return Some((
                     ResolveValueResult::ValueNs(
                         match *l {
-                            LangItemTarget::Function(it) => ValueNs::FunctionId(it),
-                            LangItemTarget::Static(it) => ValueNs::StaticId(it),
-                            LangItemTarget::Struct(it) => ValueNs::StructId(it),
-                            LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
-                            LangItemTarget::Union(_)
-                            | LangItemTarget::ImplDef(_)
-                            | LangItemTarget::TypeAlias(_)
-                            | LangItemTarget::Trait(_)
+                            LangItemTarget::FunctionId(it) => ValueNs::FunctionId(it),
+                            LangItemTarget::StaticId(it) => ValueNs::StaticId(it),
+                            LangItemTarget::StructId(it) => ValueNs::StructId(it),
+                            LangItemTarget::EnumVariantId(it) => ValueNs::EnumVariantId(it),
+                            LangItemTarget::UnionId(_)
+                            | LangItemTarget::ImplId(_)
+                            | LangItemTarget::TypeAliasId(_)
+                            | LangItemTarget::TraitId(_)
                             | LangItemTarget::EnumId(_) => return None,
                         },
                         None,
@@ -351,15 +351,15 @@ pub fn resolve_path_in_value_ns_with_prefix_info(
             }
             Path::LangItem(l, Some(_)) => {
                 let type_ns = match *l {
-                    LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
-                    LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
-                    LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
-                    LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
+                    LangItemTarget::UnionId(it) => TypeNs::AdtId(it.into()),
+                    LangItemTarget::TypeAliasId(it) => TypeNs::TypeAliasId(it),
+                    LangItemTarget::StructId(it) => TypeNs::AdtId(it.into()),
+                    LangItemTarget::EnumVariantId(it) => TypeNs::EnumVariantId(it),
                     LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
-                    LangItemTarget::Trait(it) => TypeNs::TraitId(it),
-                    LangItemTarget::Function(_)
-                    | LangItemTarget::ImplDef(_)
-                    | LangItemTarget::Static(_) => return None,
+                    LangItemTarget::TraitId(it) => TypeNs::TraitId(it),
+                    LangItemTarget::FunctionId(_)
+                    | LangItemTarget::ImplId(_)
+                    | LangItemTarget::StaticId(_) => return None,
                 };
                 // Remaining segments start from 0 because lang paths have no segments other than the remaining.
                 return Some((
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
index ebbf87c..e8ccf56 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
@@ -21,7 +21,7 @@
 use crate::{
     ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId,
     ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
-    attr::Attrs,
+    attrs::AttrFlags,
     db::DefDatabase,
     expr_store::{
         ExpressionStore, ExpressionStoreSourceMap,
@@ -31,7 +31,6 @@
     },
     hir::{ExprId, PatId, generics::GenericParams},
     item_tree::{FieldsShape, RawVisibility, visibility_from_ast},
-    lang_item::LangItem,
     src::HasSource,
     type_ref::{TraitRef, TypeBound, TypeRefId},
 };
@@ -48,12 +47,13 @@ pub struct StructSignature {
     pub store: Arc<ExpressionStore>,
     pub flags: StructFlags,
     pub shape: FieldsShape,
-    pub repr: Option<ReprOptions>,
 }
 
 bitflags! {
     #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     pub struct StructFlags: u8 {
+        /// Indicates whether this struct has `#[repr]`.
+        const HAS_REPR = 1 << 0;
         /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
         const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
         /// Indicates whether the struct has a `#[fundamental]` attribute.
@@ -75,26 +75,28 @@ impl StructSignature {
     pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
         let loc = id.lookup(db);
         let InFile { file_id, value: source } = loc.source(db);
-        let attrs = db.attrs(id.into());
+        let attrs = AttrFlags::query(db, id.into());
 
         let mut flags = StructFlags::empty();
-        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
             flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
-        if attrs.by_key(sym::fundamental).exists() {
+        if attrs.contains(AttrFlags::FUNDAMENTAL) {
             flags |= StructFlags::FUNDAMENTAL;
         }
-        if let Some(lang) = attrs.lang_item() {
+        if attrs.contains(AttrFlags::HAS_REPR) {
+            flags |= StructFlags::HAS_REPR;
+        }
+        if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) {
             match lang {
-                LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA,
-                LangItem::OwnedBox => flags |= StructFlags::IS_BOX,
-                LangItem::ManuallyDrop => flags |= StructFlags::IS_MANUALLY_DROP,
-                LangItem::UnsafeCell => flags |= StructFlags::IS_UNSAFE_CELL,
-                LangItem::UnsafePinned => flags |= StructFlags::IS_UNSAFE_PINNED,
+                _ if lang == sym::phantom_data => flags |= StructFlags::IS_PHANTOM_DATA,
+                _ if lang == sym::owned_box => flags |= StructFlags::IS_BOX,
+                _ if lang == sym::manually_drop => flags |= StructFlags::IS_MANUALLY_DROP,
+                _ if lang == sym::unsafe_cell => flags |= StructFlags::IS_UNSAFE_CELL,
+                _ if lang == sym::unsafe_pinned => flags |= StructFlags::IS_UNSAFE_PINNED,
                 _ => (),
             }
         }
-        let repr = attrs.repr();
         let shape = adt_shape(source.kind());
 
         let (store, generic_params, source_map) = lower_generic_params(
@@ -112,11 +114,19 @@ pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionSt
                 flags,
                 shape,
                 name: as_name_opt(source.name()),
-                repr,
             }),
             Arc::new(source_map),
         )
     }
+
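+    /// The `#[repr]` options are no longer stored in the signature (to keep it small); they are
+    /// fetched via [`AttrFlags::repr()`] on demand, guarded by [`StructFlags::HAS_REPR`].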
+    #[inline]
+    pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option<ReprOptions> {
+        if self.flags.contains(StructFlags::HAS_REPR) {
+            AttrFlags::repr(db, id.into())
+        } else {
+            None
+        }
+    }
 }
 
 #[inline]
@@ -134,22 +144,22 @@ pub struct UnionSignature {
     pub generic_params: Arc<GenericParams>,
     pub store: Arc<ExpressionStore>,
     pub flags: StructFlags,
-    pub repr: Option<ReprOptions>,
 }
 
 impl UnionSignature {
     pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
         let loc = id.lookup(db);
-        let attrs = db.attrs(id.into());
+        let attrs = AttrFlags::query(db, id.into());
         let mut flags = StructFlags::empty();
-        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
             flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
-        if attrs.by_key(sym::fundamental).exists() {
+        if attrs.contains(AttrFlags::FUNDAMENTAL) {
             flags |= StructFlags::FUNDAMENTAL;
         }
-
-        let repr = attrs.repr();
+        if attrs.contains(AttrFlags::HAS_REPR) {
+            flags |= StructFlags::HAS_REPR;
+        }
 
         let InFile { file_id, value: source } = loc.source(db);
         let (store, generic_params, source_map) = lower_generic_params(
@@ -165,7 +175,6 @@ pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionSto
                 generic_params,
                 store,
                 flags,
-                repr,
                 name: as_name_opt(source.name()),
             }),
             Arc::new(source_map),
@@ -186,20 +195,17 @@ pub struct EnumSignature {
     pub generic_params: Arc<GenericParams>,
     pub store: Arc<ExpressionStore>,
     pub flags: EnumFlags,
-    pub repr: Option<ReprOptions>,
 }
 
 impl EnumSignature {
     pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
         let loc = id.lookup(db);
-        let attrs = db.attrs(id.into());
+        let attrs = AttrFlags::query(db, id.into());
         let mut flags = EnumFlags::empty();
-        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
             flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
 
-        let repr = attrs.repr();
-
         let InFile { file_id, value: source } = loc.source(db);
         let (store, generic_params, source_map) = lower_generic_params(
             db,
@@ -215,15 +221,14 @@ pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStor
                 generic_params,
                 store,
                 flags,
-                repr,
                 name: as_name_opt(source.name()),
             }),
             Arc::new(source_map),
         )
     }
 
-    pub fn variant_body_type(&self) -> IntegerType {
-        match self.repr {
+    pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType {
+        match AttrFlags::repr(db, id.into()) {
             Some(ReprOptions { int: Some(builtin), .. }) => builtin,
             _ => IntegerType::Pointer(true),
         }
@@ -251,9 +256,9 @@ pub fn query(db: &dyn DefDatabase, id: ConstId) -> (Arc<Self>, Arc<ExpressionSto
         let loc = id.lookup(db);
 
         let module = loc.container.module(db);
-        let attrs = db.attrs(id.into());
+        let attrs = AttrFlags::query(db, id.into());
         let mut flags = ConstFlags::empty();
-        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
             flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
         }
         let source = loc.source(db);
@@ -306,9 +311,9 @@ pub fn query(db: &dyn DefDatabase, id: StaticId) -> (Arc<Self>, Arc<ExpressionSt
         let loc = id.lookup(db);
 
         let module = loc.container.module(db);
-        let attrs = db.attrs(id.into());
+        let attrs = AttrFlags::query(db, id.into());
         let mut flags = StaticFlags::empty();
-        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
             flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
         }
 
@@ -433,7 +438,7 @@ pub fn query(db: &dyn DefDatabase, id: TraitId) -> (Arc<Self>, Arc<ExpressionSto
         let loc = id.lookup(db);
 
         let mut flags = TraitFlags::empty();
-        let attrs = db.attrs(id.into());
+        let attrs = AttrFlags::query(db, id.into());
         let source = loc.source(db);
         if source.value.auto_token().is_some() {
             flags.insert(TraitFlags::AUTO);
@@ -444,34 +449,23 @@ pub fn query(db: &dyn DefDatabase, id: TraitId) -> (Arc<Self>, Arc<ExpressionSto
         if source.value.eq_token().is_some() {
             flags.insert(TraitFlags::ALIAS);
         }
-        if attrs.by_key(sym::fundamental).exists() {
+        if attrs.contains(AttrFlags::FUNDAMENTAL) {
             flags |= TraitFlags::FUNDAMENTAL;
         }
-        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
             flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
-        if attrs.by_key(sym::rustc_paren_sugar).exists() {
+        if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) {
             flags |= TraitFlags::RUSTC_PAREN_SUGAR;
         }
-        if attrs.by_key(sym::rustc_coinductive).exists() {
+        if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) {
             flags |= TraitFlags::COINDUCTIVE;
         }
-        let mut skip_array_during_method_dispatch =
-            attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists();
-        let mut skip_boxed_slice_during_method_dispatch = false;
-        for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() {
-            for tt in tt.iter() {
-                if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
-                    skip_array_during_method_dispatch |= ident.sym == sym::array;
-                    skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
-                }
-            }
-        }
 
-        if skip_array_during_method_dispatch {
+        if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) {
             flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
         }
-        if skip_boxed_slice_during_method_dispatch {
+        if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) {
             flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
         }
 
@@ -503,7 +497,8 @@ pub struct FnFlags: u16 {
         const HAS_TARGET_FEATURE = 1 << 9;
         const DEPRECATED_SAFE_2024 = 1 << 10;
         const EXPLICIT_SAFE = 1 << 11;
-        const RUSTC_INTRINSIC = 1 << 12;
+        const HAS_LEGACY_CONST_GENERICS = 1 << 12;
+        const RUSTC_INTRINSIC = 1 << 13;
     }
 }
 
@@ -516,8 +511,6 @@ pub struct FunctionSignature {
     pub ret_type: Option<TypeRefId>,
     pub abi: Option<Symbol>,
     pub flags: FnFlags,
-    // FIXME: we should put this behind a fn flags + query to avoid bloating the struct
-    pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
 }
 
 impl FunctionSignature {
@@ -529,23 +522,26 @@ pub fn query(
         let module = loc.container.module(db);
 
         let mut flags = FnFlags::empty();
-        let attrs = db.attrs(id.into());
-        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+        let attrs = AttrFlags::query(db, id.into());
+        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
             flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
         }
 
-        if attrs.by_key(sym::target_feature).exists() {
+        if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
             flags.insert(FnFlags::HAS_TARGET_FEATURE);
         }
-        if attrs.by_key(sym::rustc_intrinsic).exists() {
+
+        if attrs.contains(AttrFlags::RUSTC_INTRINSIC) {
             flags.insert(FnFlags::RUSTC_INTRINSIC);
         }
-        let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
+        if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) {
+            flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS);
+        }
 
         let source = loc.source(db);
 
         if source.value.unsafe_token().is_some() {
-            if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() {
+            if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) {
                 flags.insert(FnFlags::DEPRECATED_SAFE_2024);
             } else {
                 flags.insert(FnFlags::UNSAFE);
@@ -587,7 +583,6 @@ pub fn query(
                 ret_type,
                 abi,
                 flags,
-                legacy_const_generics_indices,
                 name,
             }),
             Arc::new(source_map),
@@ -636,6 +631,19 @@ pub fn has_target_feature(&self) -> bool {
         self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
     }
 
+    #[inline]
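+    /// The indices are not stored in the signature to avoid bloating it; they are fetched via
+    /// [`AttrFlags::legacy_const_generic_indices()`] on demand, guarded by
+    /// [`FnFlags::HAS_LEGACY_CONST_GENERICS`].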
+    pub fn legacy_const_generics_indices<'db>(
+        &self,
+        db: &'db dyn DefDatabase,
+        id: FunctionId,
+    ) -> Option<&'db [u32]> {
+        if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) {
+            return None;
+        }
+
+        AttrFlags::legacy_const_generic_indices(db, id).as_deref()
+    }
+
     pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool {
         let data = db.function_signature(id);
         data.flags.contains(FnFlags::RUSTC_INTRINSIC)
@@ -679,11 +687,11 @@ pub fn query(
         let loc = id.lookup(db);
 
         let mut flags = TypeAliasFlags::empty();
-        let attrs = db.attrs(id.into());
-        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
+        let attrs = AttrFlags::query(db, id.into());
+        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
             flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
         }
-        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
+        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
             flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
         }
         if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
@@ -866,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
     let mut has_fields = false;
     for (ty, field) in fields.value {
         has_fields = true;
-        match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
+        match AttrFlags::is_cfg_enabled_for(&field, cfg_options) {
             Ok(()) => {
                 let type_ref =
                     col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
@@ -928,7 +936,6 @@ pub(crate) fn of(
         let loc = e.lookup(db);
         let source = loc.source(db);
         let ast_id_map = db.ast_id_map(source.file_id);
-        let span_map = db.span_map(source.file_id);
 
         let mut diagnostics = ThinVec::new();
         let cfg_options = loc.container.krate.cfg_options(db);
@@ -940,7 +947,7 @@ pub(crate) fn of(
             .variants()
             .filter_map(|variant| {
                 let ast_id = ast_id_map.ast_id(&variant);
-                match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) {
+                match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) {
                     Ok(()) => {
                         let enum_variant =
                             EnumVariantLoc { id: source.with_value(ast_id), parent: e, index }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
index 367b543..153fd19 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
@@ -7,7 +7,7 @@
 
 use crate::{
     AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
-    UseId, VariantId, attr::Attrs, db::DefDatabase,
+    UseId, VariantId, attrs::AttrFlags, db::DefDatabase,
 };
 
 pub trait HasSource {
@@ -145,15 +145,13 @@ fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Se
                 (lookup.source(db).map(|it| it.kind()), lookup.container)
             }
         };
-        let span_map = db.span_map(src.file_id);
         let mut map = ArenaMap::new();
         match &src.value {
             ast::StructKind::Tuple(fl) => {
                 let cfg_options = container.krate.cfg_options(db);
                 let mut idx = 0;
                 for fd in fl.fields() {
-                    let enabled =
-                        Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
+                    let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
                     if !enabled {
                         continue;
                     }
@@ -168,8 +166,7 @@ fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Se
                 let cfg_options = container.krate.cfg_options(db);
                 let mut idx = 0;
                 for fd in fl.fields() {
-                    let enabled =
-                        Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
+                    let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
                     if !enabled {
                         continue;
                     }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index 12a1c15..3bb9c36 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -190,7 +190,15 @@ fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModu
         let mut res = DefMap::ROOT;
         for (module, data) in def_map.modules() {
             let src = data.definition_source(self);
-            if src.file_id != position.file_id {
+            // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
+            // `position.file_id` is created before the def map, so it often has the wrong crate
+            // attached, which means it won't compare equal. This should not be a problem in a real
+            // r-a session, only in tests, because in real r-a we only guess the crate in syntax-only
+            // (e.g. on-enter) handlers; the rest pick the `EditionedFileId` from the def map.
+            let Some(file_id) = src.file_id.file_id() else {
+                continue;
+            };
+            if file_id.file_id(self) != position.file_id.file_id(self) {
                 continue;
             }
 
@@ -230,7 +238,15 @@ fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<
         let mut fn_def = None;
         for (_, module) in def_map.modules() {
             let file_id = module.definition_source(self).file_id;
-            if file_id != position.file_id {
+            // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
+            // `position.file_id` is created before the def map, so it often has the wrong crate
+            // attached, which means it won't compare equal. This should not be a problem in a real
+            // r-a session, only in tests, because in real r-a we only guess the crate in syntax-only
+            // (e.g. on-enter) handlers; the rest pick the `EditionedFileId` from the def map.
+            let Some(file_id) = file_id.file_id() else {
+                continue;
+            };
+            if file_id.file_id(self) != position.file_id.file_id(self) {
                 continue;
             }
             for decl in module.scope.declarations() {
@@ -253,26 +269,25 @@ fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<
                     };
                     if size != Some(new_size) {
                         size = Some(new_size);
-                        fn_def = Some(it);
+                        fn_def = Some((it, file_id));
                     }
                 }
             }
         }
 
         // Find the innermost block expression that has a `DefMap`.
-        let def_with_body = fn_def?.into();
+        let (def_with_body, file_id) = fn_def?;
+        let def_with_body = def_with_body.into();
         let source_map = self.body_with_source_map(def_with_body).1;
         let scopes = self.expr_scopes(def_with_body);
 
-        let root_syntax_node = self.parse(position.file_id).syntax_node();
+        let root_syntax_node = self.parse(file_id).syntax_node();
         let scope_iter =
             algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
                 let block = ast::BlockExpr::cast(node)?;
                 let expr = ast::Expr::from(block);
-                let expr_id = source_map
-                    .node_expr(InFile::new(position.file_id.into(), &expr))?
-                    .as_expr()
-                    .unwrap();
+                let expr_id =
+                    source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap();
                 let scope = scopes.scope_for(expr_id).unwrap();
                 Some(scope)
             });
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 80a3c08..4fa476a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -23,6 +23,8 @@
 query-group.workspace = true
 salsa.workspace = true
 salsa-macros.workspace = true
+arrayvec.workspace = true
+thin-vec.workspace = true
 
 # local deps
 stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index 986f876..e1807cd 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,200 +1,397 @@
-//! A higher level attributes based on TokenTree, with also some shortcuts.
-use std::iter;
-use std::{borrow::Cow, fmt, ops};
+//! Defines the basics of attribute lowering.
+//!
+//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling
+//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering
+//! in r-a. Its basic job is to list attributes; however, attributes do not map one-to-one
+//! onto [`ast::Attr`], because a `cfg_attr` can expand to zero, one, or more attributes
+//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines
+//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different
+//! things from [`Meta`], so it carries several pieces of information. The basic idea is:
+//!
+//!  - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`.
+//!  - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep
+//!    the path only if it has up to 2 segments, or one segment for `path = value`.
+//!    We also only keep the value in `path = value` if it is a literal. However, we always
+//!    save all the relevant ranges of attributes (the path range and the full attribute range)
+//!    for parts of r-a (e.g. name resolution) that need a faithful representation of the
+//!    attribute.
+//!
+//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list
+//! all attributes.
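+//! For example, `#[cfg_attr(test, derive(Debug), allow(dead_code))]` is listed as two
+//! attributes, `derive(Debug)` and `allow(dead_code)`, when `test` is enabled, and as no
+//! attributes when it is disabled.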
+//!
+//! Another thing to note is that we need to be able to map an attribute back to a range
+//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate
+//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an
+//! index into the item tree attributes list. To minimize the risk of bugs, we have one
+//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether
+//! an attribute participates in name resolution.
 
+use std::{
+    borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow,
+};
+
+use ::tt::{TextRange, TextSize};
+use arrayvec::ArrayVec;
 use base_db::Crate;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use intern::{Interned, Symbol, sym};
-
+use intern::{Interned, Symbol};
 use mbe::{DelimiterKind, Punct};
-use smallvec::{SmallVec, smallvec};
-use span::{Span, SyntaxContext};
-use syntax::unescape;
-use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
-use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
-use triomphe::ThinArc;
+use parser::T;
+use smallvec::SmallVec;
+use span::{RealSpanMap, Span, SyntaxContext};
+use syntax::{
+    AstNode, NodeOrToken, SyntaxNode, SyntaxToken,
+    ast::{self, TokenTreeChildren},
+    unescape,
+};
+use syntax_bridge::DocCommentDesugarMode;
 
 use crate::{
+    AstId,
     db::ExpandDatabase,
     mod_path::ModPath,
-    name::Name,
     span_map::SpanMapRef,
-    tt::{self, TopSubtree, token_to_literal},
+    tt::{self, TopSubtree},
 };
 
-/// Syntactical attributes, without filtering of `cfg_attr`s.
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
-pub struct RawAttrs {
-    // FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted.
-    entries: Option<ThinArc<(), Attr>>,
+#[derive(Debug)]
+pub struct AttrPath {
+    /// This is empty if the path does not have exactly 1 or 2 segments.
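+    /// For example, `#[inline]` yields one segment and `#[rustfmt::skip]` two, while a longer
+    /// path like `#[core::prelude::vX::test]` leaves this empty.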
+    pub segments: ArrayVec<SyntaxToken, 2>,
+    pub range: TextRange,
+    // FIXME: This shouldn't be textual, `#[test]` needs name resolution.
+    // And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros
+    // fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def
+    // attrs can't find it. Handling it there would mean pushing every path of up to 4 segments, which
+    // may impact perf. So it was easier to just hack it here.
+    pub is_test: bool,
 }
 
-impl ops::Deref for RawAttrs {
-    type Target = [Attr];
-
-    fn deref(&self) -> &[Attr] {
-        match &self.entries {
-            Some(it) => &it.slice,
-            None => &[],
-        }
-    }
-}
-
-impl RawAttrs {
-    pub const EMPTY: Self = Self { entries: None };
-
-    pub fn new(
-        db: &dyn ExpandDatabase,
-        owner: &dyn ast::HasAttrs,
-        span_map: SpanMapRef<'_>,
-    ) -> Self {
-        let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
-
-        let entries = if entries.is_empty() {
-            None
-        } else {
-            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
-        };
-
-        RawAttrs { entries }
-    }
-
-    /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
-    pub fn new_expanded(
-        db: &dyn ExpandDatabase,
-        owner: &dyn ast::HasAttrs,
-        span_map: SpanMapRef<'_>,
-        cfg_options: &CfgOptions,
-    ) -> Self {
-        let entries: Vec<_> =
-            Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
-
-        let entries = if entries.is_empty() {
-            None
-        } else {
-            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
-        };
-
-        RawAttrs { entries }
-    }
-
-    pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
-        db: &dyn ExpandDatabase,
-        owner: &dyn ast::HasAttrs,
-        span_map: SpanMapRef<'_>,
-    ) -> impl Iterator<Item = Attr> {
-        collect_attrs(owner).filter_map(move |(id, attr)| match attr {
-            Either::Left(attr) => {
-                attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+impl AttrPath {
+    #[inline]
+    fn extract(path: &ast::Path) -> Self {
+        let mut is_test = false;
+        let segments = (|| {
+            let mut segments = ArrayVec::new();
+            let segment2 = path.segment()?.name_ref()?.syntax().first_token()?;
+            if segment2.text() == "test" {
+                // `#[test]` or `#[core::prelude::vX::test]`.
+                is_test = true;
             }
-            Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
-                let span = span_map.span_for_range(comment.syntax().text_range());
-                let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
-                Attr {
-                    id,
-                    input: Some(Box::new(AttrInput::Literal(tt::Literal {
-                        symbol: text,
-                        span,
-                        kind,
-                        suffix: None,
-                    }))),
-                    path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
-                    ctxt: span.ctx,
+            let segment1 = path.qualifier();
+            if let Some(segment1) = segment1 {
+                if segment1.qualifier().is_some() {
+                    None
+                } else {
+                    let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?;
+                    segments.push(segment1);
+                    segments.push(segment2);
+                    Some(segments)
                 }
-            }),
-            Either::Right(_) => None,
-        })
-    }
-
-    pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
-        db: &dyn ExpandDatabase,
-        owner: &dyn ast::HasAttrs,
-        span_map: SpanMapRef<'_>,
-        cfg_options: &CfgOptions,
-    ) -> impl Iterator<Item = Attr> {
-        Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
-            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
-    }
-
-    pub fn merge(&self, other: Self) -> Self {
-        match (&self.entries, other.entries) {
-            (None, None) => Self::EMPTY,
-            (None, entries @ Some(_)) => Self { entries },
-            (Some(entries), None) => Self { entries: Some(entries.clone()) },
-            (Some(a), Some(b)) => {
-                let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1);
-                let items = a
-                    .slice
-                    .iter()
-                    .cloned()
-                    .chain(b.slice.iter().map(|it| {
-                        let mut it = it.clone();
-                        let id = it.id.ast_index() + last_ast_index;
-                        it.id = AttrId::new(id, it.id.is_inner_attr());
-                        it
-                    }))
-                    .collect::<Vec<_>>();
-                Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) }
+            } else {
+                segments.push(segment2);
+                Some(segments)
             }
+        })();
+        AttrPath {
+            segments: segments.unwrap_or(ArrayVec::new()),
+            range: path.syntax().text_range(),
+            is_test,
         }
     }
 
-    /// Processes `cfg_attr`s
-    pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
-        let has_cfg_attrs =
-            self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
-        if !has_cfg_attrs {
-            return self;
+    #[inline]
+    pub fn is1(&self, segment: &str) -> bool {
+        self.segments.len() == 1 && self.segments[0].text() == segment
+    }
+}
+
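+/// A desugared attribute: for example, `#[inline]` is [`Meta::Path`], `#[derive(Debug)]` is
+/// [`Meta::TokenTree`], and `#[doc = "..."]` is [`Meta::NamedKeyValue`].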
+#[derive(Debug)]
+pub enum Meta {
+    /// `name` is `None` if the path is not a single token. `value` is a literal token or `None`.
+    NamedKeyValue {
+        path_range: TextRange,
+        name: Option<SyntaxToken>,
+        value: Option<SyntaxToken>,
+    },
+    TokenTree {
+        path: AttrPath,
+        tt: ast::TokenTree,
+    },
+    Path {
+        path: AttrPath,
+    },
+}
+
+impl Meta {
+    #[inline]
+    pub fn path_range(&self) -> TextRange {
+        match self {
+            Meta::NamedKeyValue { path_range, .. } => *path_range,
+            Meta::TokenTree { path, .. } | Meta::Path { path } => path.range,
+        }
+    }
+
+    fn extract(iter: &mut Peekable<TokenTreeChildren>) -> Option<(Self, TextSize)> {
+        let mut start_offset = None;
+        if let Some(NodeOrToken::Token(colon1)) = iter.peek()
+            && colon1.kind() == T![:]
+        {
+            start_offset = Some(colon1.text_range().start());
+            iter.next();
+            iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:]));
+        }
+        let first_segment = iter
+            .next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))?
+            .into_token()?;
+        let mut is_test = first_segment.text() == "test";
+        let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start());
+
+        let mut segments_len = 1;
+        let mut second_segment = None;
+        let mut path_range = first_segment.text_range();
+        while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
+            && let _ = iter.next()
+            && iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
+            && let _ = iter.next()
+            && let Some(NodeOrToken::Token(segment)) = iter.peek()
+            && segment.kind().is_any_identifier()
+        {
+            segments_len += 1;
+            is_test = segment.text() == "test";
+            second_segment = Some(segment.clone());
+            path_range = TextRange::new(path_range.start(), segment.text_range().end());
+            iter.next();
         }
 
-        let cfg_options = krate.cfg_options(db);
-        let new_attrs = self
-            .iter()
-            .cloned()
-            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
-            .collect::<Vec<_>>();
-        let entries = if new_attrs.is_empty() {
-            None
-        } else {
-            Some(ThinArc::from_header_and_iter((), new_attrs.into_iter()))
+        let segments = |first, second| {
+            let mut segments = ArrayVec::new();
+            if segments_len <= 2 {
+                segments.push(first);
+                if let Some(second) = second {
+                    segments.push(second);
+                }
+            }
+            segments
         };
-        RawAttrs { entries }
+        let meta = match iter.peek() {
+            Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
+                iter.next();
+                let value = match iter.peek() {
+                    Some(NodeOrToken::Token(token)) if token.kind().is_literal() => {
+                        // No need to consume it, it will be consumed by `extract_and_eat_comma()`.
+                        Some(token.clone())
+                    }
+                    _ => None,
+                };
+                let name = if second_segment.is_none() { Some(first_segment) } else { None };
+                Meta::NamedKeyValue { path_range, name, value }
+            }
+            Some(NodeOrToken::Node(tt)) => Meta::TokenTree {
+                path: AttrPath {
+                    segments: segments(first_segment, second_segment),
+                    range: path_range,
+                    is_test,
+                },
+                tt: tt.clone(),
+            },
+            _ => Meta::Path {
+                path: AttrPath {
+                    segments: segments(first_segment, second_segment),
+                    range: path_range,
+                    is_test,
+                },
+            },
+        };
+        Some((meta, start_offset))
     }
 
-    pub fn is_empty(&self) -> bool {
-        self.entries.is_none()
+    fn extract_possibly_unsafe(
+        iter: &mut Peekable<TokenTreeChildren>,
+        container: &ast::TokenTree,
+    ) -> Option<(Self, TextRange)> {
+        if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) {
+            iter.next();
+            let tt = iter.next()?.into_node()?;
+            let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map(
+                |(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))),
+            );
+            while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
+            result
+        } else {
+            Self::extract(iter).map(|(meta, start_offset)| {
+                let end_offset = 'find_end_offset: {
+                    for it in iter {
+                        if let NodeOrToken::Token(it) = it
+                            && it.kind() == T![,]
+                        {
+                            break 'find_end_offset it.text_range().start();
+                        }
+                    }
+                    tt_end_offset(container)
+                };
+                (meta, TextRange::new(start_offset, end_offset))
+            })
+        }
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct AttrId {
-    id: u32,
+fn tt_end_offset(tt: &ast::TokenTree) -> TextSize {
+    tt.syntax().last_token().unwrap().text_range().start()
 }
 
-// FIXME: This only handles a single level of cfg_attr nesting
-// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
-impl AttrId {
-    const INNER_ATTR_SET_BIT: u32 = 1 << 31;
+/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it
+/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute,
+/// and the outermost `ast::Attr`. Note that one `ast::Attr` may map to multiple [`Meta`]s due to `cfg_attr`.
+#[inline]
+pub fn expand_cfg_attr<'a, BreakValue>(
+    attrs: impl Iterator<Item = ast::Attr>,
+    cfg_options: impl FnMut() -> &'a CfgOptions,
+    mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow<BreakValue>,
+) -> Option<BreakValue> {
+    expand_cfg_attr_with_doc_comments::<Infallible, _>(
+        attrs.map(Either::Left),
+        cfg_options,
+        move |Either::Left((meta, container, range, top_attr))| {
+            callback(meta, container, range, top_attr)
+        },
+    )
+}
 
-    pub fn new(id: usize, is_inner: bool) -> Self {
-        assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
-        let id = id as u32;
-        Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
-    }
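+/// Like [`expand_cfg_attr()`], but the attribute iterator can also yield doc comments
+/// (`Either::Right`), which are forwarded to the callback unchanged.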
+#[inline]
+pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>(
+    mut attrs: impl Iterator<Item = Either<ast::Attr, DocComment>>,
+    mut cfg_options: impl FnMut() -> &'a CfgOptions,
+    mut callback: impl FnMut(
+        Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>,
+    ) -> ControlFlow<BreakValue>,
+) -> Option<BreakValue> {
+    let mut stack = SmallVec::<[_; 1]>::new();
+    let result = attrs.try_for_each(|top_attr| {
+        let top_attr = match top_attr {
+            Either::Left(it) => it,
+            Either::Right(comment) => return callback(Either::Right(comment)),
+        };
+        if let Some((attr_name, tt)) = top_attr.as_simple_call()
+            && attr_name == "cfg_attr"
+        {
+            let mut tt_iter = TokenTreeChildren::new(&tt).peekable();
+            let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter);
+            if cfg_options().check(&cfg) != Some(false) {
+                stack.push((tt_iter, tt));
+                while let Some((tt_iter, tt)) = stack.last_mut() {
+                    let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else {
+                        stack.pop();
+                        continue;
+                    };
+                    if let Meta::TokenTree { path, tt: nested_tt } = &attr
+                        && path.is1("cfg_attr")
+                    {
+                        let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable();
+                        let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter);
+                        if cfg_options().check(&cfg) != Some(false) {
+                            stack.push((nested_tt_iter, nested_tt.clone()));
+                        }
+                    } else {
+                        callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?;
+                    }
+                }
+            }
+        } else if let Some(ast_meta) = top_attr.meta()
+            && let Some(path) = ast_meta.path()
+        {
+            let path = AttrPath::extract(&path);
+            let meta = if let Some(tt) = ast_meta.token_tree() {
+                Meta::TokenTree { path, tt }
+            } else if let Some(value) = ast_meta.expr() {
+                let value =
+                    if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None };
+                let name =
+                    if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None };
+                Meta::NamedKeyValue { name, value, path_range: path.range }
+            } else {
+                Meta::Path { path }
+            };
+            callback(Either::Left((
+                meta,
+                ast_meta.syntax(),
+                ast_meta.syntax().text_range(),
+                &top_attr,
+            )))?;
+        }
+        ControlFlow::Continue(())
+    });
+    result.break_value()
+}
 
-    pub fn ast_index(&self) -> usize {
-        (self.id & !Self::INNER_ATTR_SET_BIT) as usize
-    }
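+/// Builtin attributes that are filtered out of the item tree because name resolution
+/// never needs them; dropping them saves memory.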
+#[inline]
+pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool {
+    matches!(
+        name,
+        "doc"
+            | "stable"
+            | "unstable"
+            | "target_feature"
+            | "allow"
+            | "expect"
+            | "warn"
+            | "deny"
+            | "forbid"
+            | "repr"
+            | "inline"
+            | "track_caller"
+            | "must_use"
+    )
+}
 
-    pub fn is_inner_attr(&self) -> bool {
-        self.id & Self::INNER_ATTR_SET_BIT != 0
-    }
+/// This collects attributes exactly as the item tree needs them. This is used for the item tree,
+/// as well as for resolving [`AttrId`]s.
+pub fn collect_item_tree_attrs<'a, BreakValue>(
+    owner: &dyn ast::HasAttrs,
+    cfg_options: impl Fn() -> &'a CfgOptions,
+    mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow<BreakValue>,
+) -> Option<Either<BreakValue, CfgExpr>> {
+    let attrs = ast::attrs_including_inner(owner);
+    expand_cfg_attr(
+        attrs,
+        || cfg_options(),
+        |attr, container, range, top_attr| {
+            // We filter builtin attributes that we don't need for nameres, because this saves memory.
+            // I only put the most common attributes, but if some attribute becomes common, feel free to add it.
+            // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro!
+            let filter = match &attr {
+                Meta::NamedKeyValue { name: Some(name), .. } => {
+                    is_item_tree_filtered_attr(name.text())
+                }
+                Meta::TokenTree { path, tt } if path.segments.len() == 1 => {
+                    let name = path.segments[0].text();
+                    if name == "cfg" {
+                        let cfg =
+                            CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable());
+                        if cfg_options().check(&cfg) == Some(false) {
+                            return ControlFlow::Break(Either::Right(cfg));
+                        }
+                        true
+                    } else {
+                        is_item_tree_filtered_attr(name)
+                    }
+                }
+                Meta::Path { path } => {
+                    path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text())
+                }
+                _ => false,
+            };
+            if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) {
+                return ControlFlow::Break(Either::Left(v));
+            }
+            ControlFlow::Continue(())
+        },
+    )
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Attr {
-    pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Box<AttrInput>>,
     pub ctxt: SyntaxContext,
@@ -218,131 +415,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 }
 
 impl Attr {
-    fn from_src(
-        db: &dyn ExpandDatabase,
-        ast: ast::Meta,
-        span_map: SpanMapRef<'_>,
-        id: AttrId,
-    ) -> Option<Attr> {
-        let path = ast.path()?;
-        let range = path.syntax().text_range();
-        let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
-            span_map.span_for_range(range).ctx
-        })?);
-        let span = span_map.span_for_range(range);
-        let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
-            let token = lit.token();
-            Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span))))
-        } else if let Some(tt) = ast.token_tree() {
-            let tree = syntax_node_to_token_tree(
-                tt.syntax(),
-                span_map,
-                span,
-                DocCommentDesugarMode::ProcMacro,
-            );
-            Some(Box::new(AttrInput::TokenTree(tree)))
-        } else {
-            None
-        };
-        Some(Attr { id, path, input, ctxt: span.ctx })
-    }
-
-    fn from_tt(
-        db: &dyn ExpandDatabase,
-        mut tt: tt::TokenTreesView<'_>,
-        id: AttrId,
-    ) -> Option<Attr> {
-        if matches!(tt.flat_tokens(),
-            [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
-            if *sym == sym::unsafe_
-        ) {
-            match tt.iter().nth(1) {
-                Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(),
-                _ => return None,
-            }
-        }
-        let first = tt.flat_tokens().first()?;
-        let ctxt = first.first_span().ctx;
-        let (path, input) = {
-            let mut iter = tt.iter();
-            let start = iter.savepoint();
-            let mut input = tt::TokenTreesView::new(&[]);
-            let mut path = iter.from_savepoint(start);
-            let mut path_split_savepoint = iter.savepoint();
-            while let Some(tt) = iter.next() {
-                path = iter.from_savepoint(start);
-                if !matches!(
-                    tt,
-                    tt::TtElement::Leaf(
-                        tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
-                    )
-                ) {
-                    input = path_split_savepoint.remaining();
-                    break;
-                }
-                path_split_savepoint = iter.savepoint();
-            }
-            (path, input)
-        };
-
-        let path = Interned::new(ModPath::from_tt(db, path)?);
-
-        let input = match (input.flat_tokens().first(), input.try_into_subtree()) {
-            (_, Some(tree)) => {
-                Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
-            }
-            (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
-                match input.flat_tokens().get(1) {
-                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
-                        Some(Box::new(AttrInput::Literal(lit.clone())))
-                    }
-                    _ => None,
-                }
-            }
-            _ => None,
-        };
-        Some(Attr { id, path, input, ctxt })
-    }
-
-    pub fn path(&self) -> &ModPath {
-        &self.path
-    }
-
-    pub fn expand_cfg_attr(
-        self,
-        db: &dyn ExpandDatabase,
-        cfg_options: &CfgOptions,
-    ) -> impl IntoIterator<Item = Self> {
-        let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
-        if !is_cfg_attr {
-            return smallvec![self];
-        }
-
-        let subtree = match self.token_tree_value() {
-            Some(it) => it,
-            _ => return smallvec![self.clone()],
-        };
-
-        let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-            Some(it) => it,
-            None => return smallvec![self.clone()],
-        };
-        let index = self.id;
-        let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index));
-
-        let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
-        let cfg = CfgExpr::parse(&cfg);
-        if cfg_options.check(&cfg) == Some(false) {
-            smallvec![]
-        } else {
-            cov_mark::hit!(cfg_attr_active);
-
-            attrs.collect::<SmallVec<[_; 1]>>()
-        }
-    }
-}
-
-impl Attr {
     /// #[path = "string"]
     pub fn string_value(&self) -> Option<&Symbol> {
         match self.input.as_deref()? {
@@ -403,30 +475,26 @@ pub fn token_tree_value(&self) -> Option<&TopSubtree> {
     pub fn parse_path_comma_token_tree<'a>(
         &'a self,
         db: &'a dyn ExpandDatabase,
-    ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
+    ) -> Option<impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> + 'a> {
         let args = self.token_tree_value()?;
 
         if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis {
             return None;
         }
-        let paths = args
-            .token_trees()
-            .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
-            .filter_map(move |tts| {
-                let span = tts.flat_tokens().first()?.first_span();
-                Some((ModPath::from_tt(db, tts)?, span))
-            });
-
-        Some(paths)
+        Some(parse_path_comma_token_tree(db, args))
     }
+}
 
-    pub fn cfg(&self) -> Option<CfgExpr> {
-        if *self.path.as_ident()? == sym::cfg {
-            self.token_tree_value().map(CfgExpr::parse)
-        } else {
-            None
-        }
-    }
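+/// Splits a comma-separated token tree (such as the input of `#[derive(...)]`) into its paths,
+/// yielding each path together with the span of its first token and its raw token trees.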
+fn parse_path_comma_token_tree<'a>(
+    db: &'a dyn ExpandDatabase,
+    args: &'a tt::TopSubtree,
+) -> impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> {
+    args.token_trees()
+        .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+        .filter_map(move |tts| {
+            let span = tts.flat_tokens().first()?.first_span();
+            Some((ModPath::from_tt(db, tts)?, span, tts))
+        })
 }
 
 fn unescape(s: &str) -> Option<Cow<'_, str>> {
@@ -455,58 +523,104 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
     }
 }
 
-pub fn collect_attrs(
-    owner: &dyn ast::HasAttrs,
-) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
-    let inner_attrs =
-        inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true));
-    let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax())
-        .filter(|el| match el {
-            Either::Left(attr) => attr.kind().is_outer(),
-            Either::Right(comment) => comment.is_outer(),
-        })
-        .zip(iter::repeat(false));
-    outer_attrs
-        .chain(inner_attrs)
-        .enumerate()
-        .map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr))
+/// An index of an attribute *that always refers to the item tree attributes*.
+///
+/// Outer attributes are counted first, then inner attributes. This does not support
+/// out-of-line modules, which may have attributes spread across two files!
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct AttrId {
+    id: u32,
 }
 
-fn inner_attributes(
-    syntax: &SyntaxNode,
-) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
-    let node = match_ast! {
-        match syntax {
-            ast::SourceFile(_) => syntax.clone(),
-            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
-            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
-            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
-            ast::Module(it) => it.item_list()?.syntax().clone(),
-            ast::BlockExpr(it) => {
-                if !it.may_carry_attributes() {
-                    return None
+impl AttrId {
+    #[inline]
+    pub fn from_item_tree_index(id: u32) -> Self {
+        Self { id }
+    }
+
+    #[inline]
+    pub fn item_tree_index(self) -> u32 {
+        self.id
+    }
+
+    /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
+    /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
+    /// attribute, and its desugared [`Meta`].
+    pub fn find_attr_range<N: ast::HasAttrs>(
+        self,
+        db: &dyn ExpandDatabase,
+        krate: Crate,
+        owner: AstId<N>,
+    ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
+        self.find_attr_range_with_source(db, krate, &owner.to_node(db))
+    }
+
+    /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
+    /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
+    /// attribute, and its desugared [`Meta`].
+    pub fn find_attr_range_with_source(
+        self,
+        db: &dyn ExpandDatabase,
+        krate: Crate,
+        owner: &dyn ast::HasAttrs,
+    ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
+        let cfg_options = OnceCell::new();
+        let mut index = 0;
+        let result = collect_item_tree_attrs(
+            owner,
+            || cfg_options.get_or_init(|| krate.cfg_options(db)),
+            |meta, container, top_attr, range| {
+                if index == self.id {
+                    return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta));
                 }
-                syntax.clone()
+                index += 1;
+                ControlFlow::Continue(())
             },
-            _ => return None,
+        );
+        match result {
+            Some(Either::Left(it)) => it,
+            _ => {
+                panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}");
+            }
         }
-    };
+    }
 
-    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
-        Either::Left(attr) => attr.kind().is_inner(),
-        Either::Right(comment) => comment.is_inner(),
-    });
-    Some(attrs)
-}
-
-// Input subtree is: `(cfg, $(attr),+)`
-// Split it up into a `cfg` subtree and the `attr` subtrees.
-fn parse_cfg_attr_input(
-    subtree: &TopSubtree,
-) -> Option<(tt::TokenTreesView<'_>, impl Iterator<Item = tt::TokenTreesView<'_>>)> {
-    let mut parts = subtree
-        .token_trees()
-        .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
-    let cfg = parts.next()?;
-    Some((cfg, parts.filter(|it| !it.is_empty())))
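+    /// Returns the text range of the `derive_index`-th path inside the `#[derive(...)]` attribute
+    /// this `AttrId` points to, falling back to the whole attribute's range if it cannot be
+    /// located.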
+    pub fn find_derive_range(
+        self,
+        db: &dyn ExpandDatabase,
+        krate: Crate,
+        owner: AstId<ast::Adt>,
+        derive_index: u32,
+    ) -> TextRange {
+        let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner);
+        let Meta::TokenTree { tt, .. } = derive_attr else {
+            return derive_attr_range;
+        };
+        // Fake the span map, as we don't really need spans here, just the offsets of the node in the file.
+        let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition(
+            span::FileId::from_raw(0),
+        ));
+        let tt = syntax_bridge::syntax_node_to_token_tree(
+            tt.syntax(),
+            SpanMapRef::RealSpanMap(&span_map),
+            span_map.span_for_range(tt.syntax().text_range()),
+            DocCommentDesugarMode::ProcMacro,
+        );
+        let Some((_, _, derive_tts)) =
+            parse_path_comma_token_tree(db, &tt).nth(derive_index as usize)
+        else {
+            return derive_attr_range;
+        };
+        let (Some(first_tt), Some(last_tt)) =
+            (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last())
+        else {
+            return derive_attr_range;
+        };
+        let start = first_tt.first_span().range.start();
+        let end = match last_tt {
+            tt::TokenTree::Leaf(it) => it.span().range.end(),
+            tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(),
+        };
+        TextRange::new(start, end)
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs
index 0fa412a..6582f4b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs
@@ -392,12 +392,7 @@ fn to_adt_syntax(
     tt: &tt::TopSubtree,
     call_site: Span,
 ) -> Result<(ast::Adt, span::SpanMap<SyntaxContext>), ExpandError> {
-    let (parsed, tm) = crate::db::token_tree_to_syntax_node(
-        db,
-        tt,
-        crate::ExpandTo::Items,
-        parser::Edition::CURRENT_FIXME,
-    );
+    let (parsed, tm) = crate::db::token_tree_to_syntax_node(db, tt, crate::ExpandTo::Items);
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
         .ok_or_else(|| ExpandError::other(call_site, "invalid item definition"))?;
     let item =
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
index 78fac8f..3d630cf 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
@@ -18,7 +18,7 @@
 use syntax_bridge::syntax_node_to_token_tree;
 
 use crate::{
-    EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId,
+    EditionedFileId, ExpandError, ExpandResult, MacroCallId,
     builtin::quote::{WithDelimiter, dollar_crate},
     db::ExpandDatabase,
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
@@ -230,9 +230,9 @@ fn assert_expand(
     let mut iter = tt.iter();
 
     let cond = expect_fragment(
+        db,
         &mut iter,
         parser::PrefixEntryPoint::Expr,
-        id.lookup(db).krate.data(db).edition,
         tt.top_subtree().delimiter.delim_span(),
     );
     _ = iter.expect_char(',');
@@ -772,7 +772,7 @@ fn relative_file(
     if res == call_site && !allow_recursion {
         Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
     } else {
-        Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
+        Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate))
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
index d5ebd6e..227a62f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
@@ -1,373 +1,346 @@
 //! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
-use std::iter::Peekable;
+use std::{cell::OnceCell, ops::ControlFlow};
 
+use ::tt::TextRange;
 use base_db::Crate;
-use cfg::{CfgAtom, CfgExpr};
-use intern::{Symbol, sym};
-use rustc_hash::FxHashSet;
+use cfg::CfgExpr;
+use parser::T;
+use smallvec::SmallVec;
 use syntax::{
-    AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
-    ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
+    AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent,
+    ast::{self, HasAttrs, TokenTreeChildren},
 };
-use tracing::{debug, warn};
+use syntax_bridge::DocCommentDesugarMode;
 
-use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind};
+use crate::{
+    attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr},
+    db::ExpandDatabase,
+    fixup::{self, SyntaxFixupUndoInfo},
+    span_map::SpanMapRef,
+    tt::{self, DelimSpan, Span},
+};
 
-fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
-    if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
-        return None;
-    }
-    let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
-    let enabled = krate.cfg_options(db).check(&cfg) != Some(false);
-    Some(enabled)
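+/// Break value signalling that the current item is disabled by a `#[cfg]` attribute.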
+struct ItemIsCfgedOut;
+
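+/// An attribute to keep in the macro input, possibly one expanded out of a `cfg_attr`,
+/// identified by its text range.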
+#[derive(Debug)]
+struct ExpandedAttrToProcess {
+    range: TextRange,
 }
 
-fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
-    if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
-        return None;
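+/// Tracks whether we are currently inside the tokens of the next expanded attribute.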
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum NextExpandedAttrState {
+    NotStarted,
+    InTheMiddle,
+}
+
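+/// Bookkeeping for a single `ast::Attr` in the macro input, together with the expanded
+/// attributes (after `cfg_attr` expansion) whose tokens should be kept.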
+#[derive(Debug)]
+struct AstAttrToProcess {
+    range: TextRange,
+    expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>,
+    expanded_attrs_idx: usize,
+    next_expanded_attr: NextExpandedAttrState,
+    pound_span: Span,
+    brackets_span: DelimSpan,
+    /// If `Some`, this is an inner attribute.
+    excl_span: Option<Span>,
+}
+
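+/// Builds the callback for `syntax_node_to_token_tree_modified` that processes attributes while
+/// lowering the macro input: attributes listed in `censor_item_tree_attr_ids` are stripped, items
+/// disabled by `#[cfg]` are skipped together with a trailing comma, and attributes surviving
+/// `cfg_attr` expansion are re-emitted with synthesized `#`, `!`, `[` and `]` tokens.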
+fn macro_input_callback(
+    db: &dyn ExpandDatabase,
+    is_derive: bool,
+    censor_item_tree_attr_ids: &[AttrId],
+    krate: Crate,
+    default_span: Span,
+    span_map: SpanMapRef<'_>,
+) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>) {
+    let cfg_options = OnceCell::new();
+    let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db));
+
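+    // Tracks the running item tree attribute index and reports whether the current attribute is
+    // the next one listed in `censor_item_tree_attr_ids` (walked in order) and should be stripped.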
+    let mut should_strip_attr = {
+        let mut item_tree_attr_id = 0;
+        let mut censor_item_tree_attr_ids_index = 0;
+        move || {
+            let mut result = false;
+            if let Some(&next_censor_attr_id) =
+                censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index)
+                && next_censor_attr_id.item_tree_index() == item_tree_attr_id
+            {
+                censor_item_tree_attr_ids_index += 1;
+                result = true;
+            }
+            item_tree_attr_id += 1;
+            result
+        }
+    };
+
+    let mut attrs = Vec::new();
+    let mut attrs_idx = 0;
+    let mut has_inner_attrs_owner = false;
+    let mut in_attr = false;
+    let mut done_with_attrs = false;
+    let mut did_top_attrs = false;
+    move |preorder, event| {
+        match event {
+            WalkEvent::Enter(SyntaxElement::Node(node)) => {
+                if done_with_attrs {
+                    return (true, Vec::new());
+                }
+
+                if ast::Attr::can_cast(node.kind()) {
+                    in_attr = true;
+                    let node_range = node.text_range();
+                    while attrs
+                        .get(attrs_idx)
+                        .is_some_and(|it: &AstAttrToProcess| it.range != node_range)
+                    {
+                        attrs_idx += 1;
+                    }
+                } else if !in_attr && let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) {
+                    // Attributes of the form `key = value` have `ast::Expr` in them, which returns `Some` for
+                    // `AnyHasAttrs::cast()`, so we also need to check `in_attr`.
+
+                    if has_inner_attrs_owner {
+                        has_inner_attrs_owner = false;
+                        return (true, Vec::new());
+                    }
+
+                    if did_top_attrs && !is_derive {
+                        // Derives need all attributes handled, but attribute macros only need the
+                        // top-level attributes handled.
+                        done_with_attrs = true;
+                        return (true, Vec::new());
+                    }
+                    did_top_attrs = true;
+
+                    if let Some(inner_attrs_node) = has_attrs.inner_attributes_node()
+                        && inner_attrs_node != *node
+                    {
+                        has_inner_attrs_owner = true;
+                    }
+
+                    let node_attrs = ast::attrs_including_inner(&has_attrs);
+
+                    attrs.clear();
+                    node_attrs.clone().for_each(|attr| {
+                        let span_for = |token: Option<SyntaxToken>| {
+                            token
+                                .map(|token| span_map.span_for_range(token.text_range()))
+                                .unwrap_or(default_span)
+                        };
+                        attrs.push(AstAttrToProcess {
+                            range: attr.syntax().text_range(),
+                            pound_span: span_for(attr.pound_token()),
+                            brackets_span: DelimSpan {
+                                open: span_for(attr.l_brack_token()),
+                                close: span_for(attr.r_brack_token()),
+                            },
+                            excl_span: attr
+                                .excl_token()
+                                .map(|token| span_map.span_for_range(token.text_range())),
+                            expanded_attrs: SmallVec::new(),
+                            expanded_attrs_idx: 0,
+                            next_expanded_attr: NextExpandedAttrState::NotStarted,
+                        });
+                    });
+
+                    attrs_idx = 0;
+                    let strip_current_item = expand_cfg_attr(
+                        node_attrs,
+                        &cfg_options,
+                        |attr, _container, range, top_attr| {
+                            // Find the attr.
+                            while attrs[attrs_idx].range != top_attr.syntax().text_range() {
+                                attrs_idx += 1;
+                            }
+
+                            let mut strip_current_attr = false;
+                            match attr {
+                                Meta::NamedKeyValue { name, .. } => {
+                                    if name
+                                        .is_none_or(|name| !is_item_tree_filtered_attr(name.text()))
+                                    {
+                                        strip_current_attr = should_strip_attr();
+                                    }
+                                }
+                                Meta::TokenTree { path, tt } => {
+                                    if path.segments.len() != 1
+                                        || !is_item_tree_filtered_attr(path.segments[0].text())
+                                    {
+                                        strip_current_attr = should_strip_attr();
+                                    }
+
+                                    if path.segments.len() == 1 {
+                                        let name = path.segments[0].text();
+
+                                        if name == "cfg" {
+                                            let cfg_expr = CfgExpr::parse_from_ast(
+                                                &mut TokenTreeChildren::new(&tt).peekable(),
+                                            );
+                                            if cfg_options().check(&cfg_expr) == Some(false) {
+                                                return ControlFlow::Break(ItemIsCfgedOut);
+                                            }
+                                            strip_current_attr = true;
+                                        }
+                                    }
+                                }
+                                Meta::Path { path } => {
+                                    if path.segments.len() != 1
+                                        || !is_item_tree_filtered_attr(path.segments[0].text())
+                                    {
+                                        strip_current_attr = should_strip_attr();
+                                    }
+                                }
+                            }
+
+                            if !strip_current_attr {
+                                attrs[attrs_idx]
+                                    .expanded_attrs
+                                    .push(ExpandedAttrToProcess { range });
+                            }
+
+                            ControlFlow::Continue(())
+                        },
+                    );
+                    attrs_idx = 0;
+
+                    if strip_current_item.is_some() {
+                        preorder.skip_subtree();
+                        attrs.clear();
+
+                        'eat_comma: {
+                            // If there is a comma after this node, eat it too.
+                            let mut events_until_comma = 0;
+                            for event in preorder.clone() {
+                                match event {
+                                    WalkEvent::Enter(SyntaxElement::Node(_))
+                                    | WalkEvent::Leave(_) => {}
+                                    WalkEvent::Enter(SyntaxElement::Token(token)) => {
+                                        let kind = token.kind();
+                                        if kind == T![,] {
+                                            break;
+                                        } else if !kind.is_trivia() {
+                                            break 'eat_comma;
+                                        }
+                                    }
+                                }
+                                events_until_comma += 1;
+                            }
+                            preorder.nth(events_until_comma);
+                        }
+
+                        return (false, Vec::new());
+                    }
+                }
+            }
+            WalkEvent::Leave(SyntaxElement::Node(node)) => {
+                if ast::Attr::can_cast(node.kind()) {
+                    in_attr = false;
+                    attrs_idx += 1;
+                }
+            }
+            WalkEvent::Enter(SyntaxElement::Token(token)) => {
+                if !in_attr {
+                    return (true, Vec::new());
+                }
+
+                let Some(ast_attr) = attrs.get_mut(attrs_idx) else {
+                    return (true, Vec::new());
+                };
+                let token_range = token.text_range();
+                let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx)
+                else {
+                    // No expanded attributes in this `ast::Attr`, or we already finished them all;
+                    // either way the remaining tokens should be discarded.
+                    return (false, Vec::new());
+                };
+                match ast_attr.next_expanded_attr {
+                    NextExpandedAttrState::NotStarted => {
+                        if token_range.start() >= expanded_attr.range.start() {
+                            // We started the next attribute.
+                            let mut insert_tokens = Vec::with_capacity(3);
+                            insert_tokens.push(tt::Leaf::Punct(tt::Punct {
+                                char: '#',
+                                spacing: tt::Spacing::Alone,
+                                span: ast_attr.pound_span,
+                            }));
+                            if let Some(span) = ast_attr.excl_span {
+                                insert_tokens.push(tt::Leaf::Punct(tt::Punct {
+                                    char: '!',
+                                    spacing: tt::Spacing::Alone,
+                                    span,
+                                }));
+                            }
+                            insert_tokens.push(tt::Leaf::Punct(tt::Punct {
+                                char: '[',
+                                spacing: tt::Spacing::Alone,
+                                span: ast_attr.brackets_span.open,
+                            }));
+
+                            ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle;
+
+                            return (true, insert_tokens);
+                        } else {
+                            // Before any attribute or between the attributes.
+                            return (false, Vec::new());
+                        }
+                    }
+                    NextExpandedAttrState::InTheMiddle => {
+                        if token_range.start() >= expanded_attr.range.end() {
+                            // Finished the current attribute.
+                            let insert_tokens = vec![tt::Leaf::Punct(tt::Punct {
+                                char: ']',
+                                spacing: tt::Spacing::Alone,
+                                span: ast_attr.brackets_span.close,
+                            })];
+
+                            ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted;
+                            ast_attr.expanded_attrs_idx += 1;
+
+                            // It's safe to ignore the current token because between attributes
+                            // there is always at least one token we skip - either the closing bracket
+                            // in `#[]` or the comma in case of multiple attrs in `cfg_attr` expansion.
+                            return (false, insert_tokens);
+                        } else {
+                            // Still in the middle.
+                            return (true, Vec::new());
+                        }
+                    }
+                }
+            }
+            WalkEvent::Leave(SyntaxElement::Token(_)) => {}
+        }
+        (true, Vec::new())
     }
-    check_cfg_attr_value(db, &attr.token_tree()?, krate)
+}
+
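+/// Lowers an attribute or derive macro's input to a token tree, applying syntax fixups and the
+/// attribute/`cfg` processing of [`macro_input_callback`].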
+pub(crate) fn attr_macro_input_to_token_tree(
+    db: &dyn ExpandDatabase,
+    node: &SyntaxNode,
+    span_map: SpanMapRef<'_>,
+    span: Span,
+    is_derive: bool,
+    censor_item_tree_attr_ids: &[AttrId],
+    krate: Crate,
+) -> (tt::TopSubtree, SyntaxFixupUndoInfo) {
+    let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro);
+    (
+        syntax_bridge::syntax_node_to_token_tree_modified(
+            node,
+            span_map,
+            fixups.append,
+            fixups.remove,
+            span,
+            DocCommentDesugarMode::ProcMacro,
+            macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map),
+        ),
+        fixups.undo_info,
+    )
 }
 
 pub fn check_cfg_attr_value(
     db: &dyn ExpandDatabase,
-    attr: &TokenTree,
+    attr: &ast::TokenTree,
     krate: Crate,
 ) -> Option<bool> {
-    let cfg_expr = parse_from_attr_token_tree(attr)?;
-    let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false);
-    Some(enabled)
-}
-
-fn process_has_attrs_with_possible_comma<I: HasAttrs>(
-    db: &dyn ExpandDatabase,
-    items: impl Iterator<Item = I>,
-    krate: Crate,
-    remove: &mut FxHashSet<SyntaxElement>,
-) -> Option<()> {
-    for item in items {
-        let field_attrs = item.attrs();
-        'attrs: for attr in field_attrs {
-            if let Some(enabled) = check_cfg(db, &attr, krate) {
-                if enabled {
-                    debug!("censoring {:?}", attr.syntax());
-                    remove.insert(attr.syntax().clone().into());
-                } else {
-                    debug!("censoring {:?}", item.syntax());
-                    remove.insert(item.syntax().clone().into());
-                    // We need to remove the , as well
-                    remove_possible_comma(&item, remove);
-                    break 'attrs;
-                }
-            }
-
-            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
-                if enabled {
-                    debug!("Removing cfg_attr tokens {:?}", attr);
-                    let meta = attr.meta()?;
-                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
-                    remove.extend(removes_from_cfg_attr);
-                } else {
-                    debug!("censoring type cfg_attr {:?}", item.syntax());
-                    remove.insert(attr.syntax().clone().into());
-                }
-            }
-        }
-    }
-    Some(())
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-enum CfgExprStage {
-    /// Stripping the CFGExpr part of the attribute
-    StrippigCfgExpr,
-    /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute
-    FoundComma,
-    /// Everything following the attribute. This could be another attribute or the end of the attribute.
-    // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
-    // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
-    EverythingElse,
-}
-
-/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input.
-fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
-    let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
-    debug!("Enabling attribute {}", meta);
-    let meta_path = meta.path()?;
-    debug!("Removing {:?}", meta_path.syntax());
-    remove.insert(meta_path.syntax().clone().into());
-
-    let meta_tt = meta.token_tree()?;
-    debug!("meta_tt {}", meta_tt);
-    let mut stage = CfgExprStage::StrippigCfgExpr;
-    for tt in meta_tt.token_trees_and_tokens() {
-        debug!("Checking {:?}. Stage: {:?}", tt, stage);
-        match (stage, tt) {
-            (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => {
-                remove.insert(node.syntax().clone().into());
-            }
-            (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => {
-                if token.kind() == T![,] {
-                    stage = CfgExprStage::FoundComma;
-                }
-                remove.insert(token.into());
-            }
-            (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
-                if (token.kind() == T![,] || token.kind() == T![')']) =>
-            {
-                // The end of the attribute or separator for the next attribute
-                stage = CfgExprStage::EverythingElse;
-                remove.insert(token.into());
-            }
-            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
-                remove.insert(node.syntax().clone().into());
-            }
-            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
-                remove.insert(token.into());
-            }
-            // This is an actual attribute
-            _ => {}
-        }
-    }
-    if stage != CfgExprStage::EverythingElse {
-        warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
-        return None;
-    }
-    Some(remove)
-}
-/// Removes a possible comma after the [AstNode]
-fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
-    if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) {
-        res.insert(comma);
-    }
-}
-fn process_enum(
-    db: &dyn ExpandDatabase,
-    variants: VariantList,
-    krate: Crate,
-    remove: &mut FxHashSet<SyntaxElement>,
-) -> Option<()> {
-    'variant: for variant in variants.variants() {
-        for attr in variant.attrs() {
-            if let Some(enabled) = check_cfg(db, &attr, krate) {
-                if enabled {
-                    debug!("censoring {:?}", attr.syntax());
-                    remove.insert(attr.syntax().clone().into());
-                } else {
-                    // Rustc does not strip the attribute if it is enabled. So we will leave it
-                    debug!("censoring type {:?}", variant.syntax());
-                    remove.insert(variant.syntax().clone().into());
-                    // We need to remove the , as well
-                    remove_possible_comma(&variant, remove);
-                    continue 'variant;
-                }
-            }
-
-            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
-                if enabled {
-                    debug!("Removing cfg_attr tokens {:?}", attr);
-                    let meta = attr.meta()?;
-                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
-                    remove.extend(removes_from_cfg_attr);
-                } else {
-                    debug!("censoring type cfg_attr {:?}", variant.syntax());
-                    remove.insert(attr.syntax().clone().into());
-                }
-            }
-        }
-        if let Some(fields) = variant.field_list() {
-            match fields {
-                ast::FieldList::RecordFieldList(fields) => {
-                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
-                }
-                ast::FieldList::TupleFieldList(fields) => {
-                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
-                }
-            }
-        }
-    }
-    Some(())
-}
-
-pub(crate) fn process_cfg_attrs(
-    db: &dyn ExpandDatabase,
-    node: &SyntaxNode,
-    loc: &MacroCallLoc,
-) -> Option<FxHashSet<SyntaxElement>> {
-    // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
-    let is_derive = match loc.def.kind {
-        MacroDefKind::BuiltInDerive(..)
-        | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
-        MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
-        _ => false,
-    };
-    let mut remove = FxHashSet::default();
-
-    let item = ast::Item::cast(node.clone())?;
-    for attr in item.attrs() {
-        if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
-            if enabled {
-                debug!("Removing cfg_attr tokens {:?}", attr);
-                let meta = attr.meta()?;
-                let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
-                remove.extend(removes_from_cfg_attr);
-            } else {
-                debug!("Removing type cfg_attr {:?}", item.syntax());
-                remove.insert(attr.syntax().clone().into());
-            }
-        }
-    }
-
-    if is_derive {
-        // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
-        // (cfg_attr is handled above, cfg is handled in the def map).
-        match item {
-            ast::Item::Struct(it) => match it.field_list()? {
-                ast::FieldList::RecordFieldList(fields) => {
-                    process_has_attrs_with_possible_comma(
-                        db,
-                        fields.fields(),
-                        loc.krate,
-                        &mut remove,
-                    )?;
-                }
-                ast::FieldList::TupleFieldList(fields) => {
-                    process_has_attrs_with_possible_comma(
-                        db,
-                        fields.fields(),
-                        loc.krate,
-                        &mut remove,
-                    )?;
-                }
-            },
-            ast::Item::Enum(it) => {
-                process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
-            }
-            ast::Item::Union(it) => {
-                process_has_attrs_with_possible_comma(
-                    db,
-                    it.record_field_list()?.fields(),
-                    loc.krate,
-                    &mut remove,
-                )?;
-            }
-            // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
-            _ => {}
-        }
-    }
-    Some(remove)
-}
-/// Parses a `cfg` attribute from the meta
-fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
-    let mut iter = tt
-        .token_trees_and_tokens()
-        .filter(is_not_whitespace)
-        .skip(1)
-        .take_while(is_not_closing_paren)
-        .peekable();
-    next_cfg_expr_from_syntax(&mut iter)
-}
-
-fn is_not_closing_paren(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
-    !matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')']))
-}
-fn is_not_whitespace(element: &NodeOrToken<ast::TokenTree, syntax::SyntaxToken>) -> bool {
-    !matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE))
-}
-
-fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
-where
-    I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
-{
-    let name = match iter.next() {
-        None => return None,
-        Some(NodeOrToken::Token(element)) => match element.kind() {
-            syntax::T![ident] => Symbol::intern(element.text()),
-            _ => return Some(CfgExpr::Invalid),
-        },
-        Some(_) => return Some(CfgExpr::Invalid),
-    };
-    let result = match &name {
-        s if [&sym::all, &sym::any, &sym::not].contains(&s) => {
-            let mut preds = Vec::new();
-            let Some(NodeOrToken::Node(tree)) = iter.next() else {
-                return Some(CfgExpr::Invalid);
-            };
-            let mut tree_iter = tree
-                .token_trees_and_tokens()
-                .filter(is_not_whitespace)
-                .skip(1)
-                .take_while(is_not_closing_paren)
-                .peekable();
-            while tree_iter.peek().is_some() {
-                let pred = next_cfg_expr_from_syntax(&mut tree_iter);
-                if let Some(pred) = pred {
-                    preds.push(pred);
-                }
-            }
-            let group = match &name {
-                s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()),
-                s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()),
-                s if *s == sym::not => {
-                    CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid)))
-                }
-                _ => unreachable!(),
-            };
-            Some(group)
-        }
-        _ => match iter.peek() {
-            Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
-                iter.next();
-                match iter.next() {
-                    Some(NodeOrToken::Token(value_token))
-                        if (value_token.kind() == syntax::SyntaxKind::STRING) =>
-                    {
-                        let value = value_token.text();
-                        Some(CfgExpr::Atom(CfgAtom::KeyValue {
-                            key: name,
-                            value: Symbol::intern(value.trim_matches('"')),
-                        }))
-                    }
-                    _ => None,
-                }
-            }
-            _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
-        },
-    };
-    if let Some(NodeOrToken::Token(element)) = iter.peek()
-        && element.kind() == syntax::T![,]
-    {
-        iter.next();
-    }
-    result
-}
-#[cfg(test)]
-mod tests {
-    use cfg::DnfExpr;
-    use expect_test::{Expect, expect};
-    use syntax::{AstNode, SourceFile, ast::Attr};
-
-    use crate::cfg_process::parse_from_attr_token_tree;
-
-    fn check_dnf_from_syntax(input: &str, expect: Expect) {
-        let parse = SourceFile::parse(input, span::Edition::CURRENT);
-        let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
-            Some(it) => it,
-            None => {
-                let node = std::any::type_name::<Attr>();
-                panic!("Failed to make ast node `{node}` from text {input}")
-            }
-        };
-        let node = node.clone_subtree();
-        assert_eq!(node.syntax().text_range().start(), 0.into());
-
-        let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap();
-        let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
-        expect.assert_eq(&actual);
-    }
-    #[test]
-    fn cfg_from_attr() {
-        check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
-        check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
-    }
+    let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable());
+    krate.cfg_options(db).check(&cfg_expr)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index f9f10c1..5c517e6 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,11 +1,9 @@
 //! Defines database & queries for macro expansion.
 
 use base_db::{Crate, RootQueryDb};
-use either::Either;
 use mbe::MatchedArmIndex;
-use rustc_hash::FxHashSet;
 use span::{AstIdMap, Edition, Span, SyntaxContext};
-use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
+use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
 use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
 use triomphe::Arc;
 
@@ -13,9 +11,9 @@
     AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
     EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
     MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
-    attrs::{AttrId, AttrInput, RawAttrs, collect_attrs},
+    attrs::Meta,
     builtin::pseudo_derive_attr_expansion,
-    cfg_process,
+    cfg_process::attr_macro_input_to_token_tree,
     declarative::DeclarativeMacroExpander,
     fixup::{self, SyntaxFixupUndoInfo},
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@@ -177,7 +175,7 @@ pub fn expand_speculative(
     let span_map = SpanMapRef::RealSpanMap(&span_map);
 
     // Build the subtree and token mapping for the speculative args
-    let (mut tt, undo_info) = match loc.kind {
+    let (mut tt, undo_info) = match &loc.kind {
         MacroCallKind::FnLike { .. } => (
             syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
@@ -200,48 +198,35 @@ pub fn expand_speculative(
             ),
             SyntaxFixupUndoInfo::NONE,
         ),
-        MacroCallKind::Derive { derive_attr_index: index, .. }
-        | MacroCallKind::Attr { invoc_attr_index: index, .. } => {
-            let censor = if let MacroCallKind::Derive { .. } = loc.kind {
-                censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
-            } else {
-                attr_source(index, &ast::Item::cast(speculative_args.clone())?)
-                    .into_iter()
-                    .map(|it| it.syntax().clone().into())
-                    .collect()
+        MacroCallKind::Derive { derive_macro_id, .. } => {
+            let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } =
+                &derive_macro_id.loc(db).kind
+            else {
+                unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`");
             };
-
-            let censor_cfg =
-                cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
-            let mut fixups = fixup::fixup_syntax(
-                span_map,
+            attr_macro_input_to_token_tree(
+                db,
                 speculative_args,
+                span_map,
                 span,
-                DocCommentDesugarMode::ProcMacro,
-            );
-            fixups.append.retain(|it, _| match it {
-                syntax::NodeOrToken::Token(_) => true,
-                it => !censor.contains(it) && !censor_cfg.contains(it),
-            });
-            fixups.remove.extend(censor);
-            fixups.remove.extend(censor_cfg);
-
-            (
-                syntax_bridge::syntax_node_to_token_tree_modified(
-                    speculative_args,
-                    span_map,
-                    fixups.append,
-                    fixups.remove,
-                    span,
-                    DocCommentDesugarMode::ProcMacro,
-                ),
-                fixups.undo_info,
+                true,
+                attr_ids,
+                loc.krate,
             )
         }
+        MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree(
+            db,
+            speculative_args,
+            span_map,
+            span,
+            false,
+            attr_ids,
+            loc.krate,
+        ),
     };
 
-    let attr_arg = match loc.kind {
-        MacroCallKind::Attr { invoc_attr_index, .. } => {
+    let attr_arg = match &loc.kind {
+        MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => {
             if loc.def.is_attribute_derive() {
                 // for pseudo-derive expansion we actually pass the attribute itself only
                 ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
@@ -260,18 +245,21 @@ pub fn expand_speculative(
                 // Attributes may have an input token tree, build the subtree and map for this as well
                 // then try finding a token id for our token if it is inside this input subtree.
                 let item = ast::Item::cast(speculative_args.clone())?;
-                let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db));
-                attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| {
-                    match attr.input.as_deref()? {
-                        AttrInput::TokenTree(tt) => {
-                            let mut attr_arg = tt.clone();
-                            attr_arg.top_subtree_delimiter_mut().kind =
-                                tt::DelimiterKind::Invisible;
-                            Some(attr_arg)
-                        }
-                        AttrInput::Literal(_) => None,
+                let (_, _, _, meta) =
+                    attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item);
+                match meta {
+                    Meta::TokenTree { tt, .. } => {
+                        let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
+                            tt.syntax(),
+                            span_map,
+                            span,
+                            DocCommentDesugarMode::ProcMacro,
+                        );
+                        attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
+                        Some(attr_arg)
                     }
-                })
+                    _ => None,
+                }
             }
         }
         _ => None,
@@ -299,7 +287,7 @@ pub fn expand_speculative(
         }
         MacroDefKind::Declarative(it, _) => db
             .decl_macro_expander(loc.krate, it)
-            .expand_unhygienic(tt, loc.kind.call_style(), span, loc.def.edition),
+            .expand_unhygienic(db, tt, loc.kind.call_style(), span),
         MacroDefKind::BuiltIn(_, it) => {
             it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
         }
@@ -315,8 +303,7 @@ pub fn expand_speculative(
     let expand_to = loc.expand_to();
 
     fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
-    let (node, rev_tmap) =
-        token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to, loc.def.edition);
+    let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
 
     let syntax_node = node.syntax_node();
     let token = rev_tmap
@@ -358,7 +345,6 @@ fn parse_macro_expansion(
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
     let _p = tracing::info_span!("parse_macro_expansion").entered();
     let loc = db.lookup_intern_macro_call(macro_file);
-    let def_edition = loc.def.edition;
     let expand_to = loc.expand_to();
     let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc);
 
@@ -369,7 +355,6 @@ fn parse_macro_expansion(
             CowArc::Owned(it) => it,
         },
         expand_to,
-        def_edition,
     );
     rev_token_map.matched_arm = matched_arm;
 
@@ -433,7 +418,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
     let (parse, map) = parse_with_map(db, loc.kind.file_id());
     let root = parse.syntax_node();
 
-    let (censor, item_node, span) = match loc.kind {
+    let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind {
         MacroCallKind::FnLike { ast_id, .. } => {
             let node = &ast_id.to_ptr(db).to_node(&root);
             let path_range = node
@@ -501,53 +486,29 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
         MacroCallKind::Derive { .. } => {
             unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`")
         }
-        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+        MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
             let node = ast_id.to_ptr(db).to_node(&root);
-            let attr_source = attr_source(invoc_attr_index, &node);
+            let range = attr_ids
+                .invoc_attr()
+                .find_attr_range_with_source(db, loc.krate, &node)
+                .3
+                .path_range();
+            let span = map.span_for_range(range);
 
-            let span = map.span_for_range(
-                attr_source
-                    .as_ref()
-                    .and_then(|it| it.path())
-                    .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
-            );
-            // If derive attribute we need to censor the derive input
-            if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
-                && ast::Adt::can_cast(node.syntax().kind())
-            {
-                let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
-                let censor_derive_input = censor_derive_input(invoc_attr_index, &adt);
-                (censor_derive_input, node, span)
-            } else {
-                (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
-            }
+            let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive());
+            (is_derive, &**attr_ids, node, span)
         }
     };
 
-    let (mut tt, undo_info) = {
-        let syntax = item_node.syntax();
-        let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
-        let mut fixups =
-            fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro);
-        fixups.append.retain(|it, _| match it {
-            syntax::NodeOrToken::Token(_) => true,
-            it => !censor.contains(it) && !censor_cfg.contains(it),
-        });
-        fixups.remove.extend(censor);
-        fixups.remove.extend(censor_cfg);
-
-        (
-            syntax_bridge::syntax_node_to_token_tree_modified(
-                syntax,
-                map,
-                fixups.append,
-                fixups.remove,
-                span,
-                DocCommentDesugarMode::ProcMacro,
-            ),
-            fixups.undo_info,
-        )
-    };
+    let (mut tt, undo_info) = attr_macro_input_to_token_tree(
+        db,
+        item_node.syntax(),
+        map.as_ref(),
+        span,
+        is_derive,
+        censor_item_tree_attr_ids,
+        loc.krate,
+    );
 
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -557,31 +518,6 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
     (Arc::new(tt), undo_info, span)
 }
 
-// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
-/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
-fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
-    // FIXME: handle `cfg_attr`
-    cov_mark::hit!(derive_censoring);
-    collect_attrs(node)
-        .take(derive_attr_index.ast_index() + 1)
-        .filter_map(|(_, attr)| Either::left(attr))
-        // FIXME, this resolution should not be done syntactically
-        // derive is a proper macro now, no longer builtin
-        // But we do not have resolution at this stage, this means
-        // we need to know about all macro calls for the given ast item here
-        // so we require some kind of mapping...
-        .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
-        .map(|it| it.syntax().clone().into())
-        .collect()
-}
-
-/// Attributes expect the invoking attribute to be stripped
-fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
-    // FIXME: handle `cfg_attr`
-    cov_mark::hit!(attribute_macro_attr_censoring);
-    collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
-}
-
 impl TokenExpander {
     fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
         match id.kind {
@@ -731,7 +667,6 @@ pub(crate) fn token_tree_to_syntax_node(
     db: &dyn ExpandDatabase,
     tt: &tt::TopSubtree,
     expand_to: ExpandTo,
-    edition: parser::Edition,
 ) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
     let entry_point = match expand_to {
         ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts,
@@ -740,7 +675,7 @@ pub(crate) fn token_tree_to_syntax_node(
         ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
         ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
     };
-    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db), edition)
+    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db))
 }
 
 fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
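
For context on the rule the removed `censor_derive_input` helper implemented (and that the new `attr_macro_input_to_token_tree` call now has to preserve): a derive expansion must not see any `#[derive(..)]` attribute up to and including the one being expanded, while later attributes and non-derive attributes stay visible. A minimal standalone sketch of that selection logic, using plain attribute-name strings in place of rust-analyzer's `ast::Attr`/`AttrId` types; all names and indices below are illustrative only.

```rust
/// Given the names of an item's attributes, in source order, and the index of
/// the `#[derive]` attribute currently being expanded, return the indices the
/// expansion must not see: every `derive` attribute up to and including the
/// invoking one.
fn censored_derive_attrs(attr_names: &[&str], derive_attr_index: usize) -> Vec<usize> {
    attr_names
        .iter()
        .copied()
        .enumerate()
        .take(derive_attr_index + 1) // only up to (and including) the invoking derive
        .filter(|(_, name)| *name == "derive")
        .map(|(idx, _)| idx)
        .collect()
}

fn main() {
    // e.g. #[serde(...)] #[derive(Clone)] #[derive(Debug)] #[repr(C)]
    let attrs = ["serde", "derive", "derive", "repr"];
    // Expanding the derive at index 2 censors both derives, but not `serde` or `repr`.
    assert_eq!(censored_derive_attrs(&attrs, 2), vec![1, 2]);
    // Expanding the derive at index 1 censors only itself.
    assert_eq!(censored_derive_attrs(&attrs, 1), vec![1]);
}
```
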
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
index e4375e0..d2df9a1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
@@ -1,17 +1,21 @@
 //! Compiled declarative macro expanders (`macro_rules!` and `macro`)
 
+use std::{cell::OnceCell, ops::ControlFlow};
+
 use base_db::Crate;
-use intern::sym;
 use span::{Edition, Span, SyntaxContext};
 use stdx::TupleExt;
-use syntax::{AstNode, ast};
+use syntax::{
+    AstNode, AstToken,
+    ast::{self, HasAttrs},
+};
 use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
     AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
     MacroCallStyle,
-    attrs::RawAttrs,
+    attrs::{Meta, expand_cfg_attr},
     db::ExpandDatabase,
     hygiene::{Transparency, apply_mark},
     tt,
@@ -42,6 +46,7 @@ pub fn expand(
             None => self
                 .mac
                 .expand(
+                    db,
                     &tt,
                     |s| {
                         s.ctx =
@@ -49,7 +54,6 @@ pub fn expand(
                     },
                     loc.kind.call_style(),
                     span,
-                    loc.def.edition,
                 )
                 .map_err(Into::into),
         }
@@ -57,10 +61,10 @@ pub fn expand(
 
     pub fn expand_unhygienic(
         &self,
+        db: &dyn ExpandDatabase,
         tt: tt::TopSubtree,
         call_style: MacroCallStyle,
         call_site: Span,
-        def_site_edition: Edition,
     ) -> ExpandResult<tt::TopSubtree> {
         match self.mac.err() {
             Some(_) => ExpandResult::new(
@@ -69,7 +73,7 @@ pub fn expand_unhygienic(
             ),
             None => self
                 .mac
-                .expand(&tt, |_| (), call_style, call_site, def_site_edition)
+                .expand(db, &tt, |_| (), call_style, call_site)
                 .map(TupleExt::head)
                 .map_err(Into::into),
         }
@@ -83,29 +87,28 @@ pub(crate) fn expander(
         let (root, map) = crate::db::parse_with_map(db, id.file_id);
         let root = root.syntax_node();
 
-        let transparency = |node| {
-            // ... would be nice to have the item tree here
-            let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
-            match attrs
-                .iter()
-                .find(|it| {
-                    it.path
-                        .as_ident()
-                        .map(|it| *it == sym::rustc_macro_transparency)
-                        .unwrap_or(false)
-                })?
-                .token_tree_value()?
-                .token_trees()
-                .flat_tokens()
-            {
-                [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
-                    s if *s == sym::transparent => Some(Transparency::Transparent),
-                    s if *s == sym::semitransparent => Some(Transparency::SemiTransparent),
-                    s if *s == sym::opaque => Some(Transparency::Opaque),
-                    _ => None,
+        let transparency = |node: ast::AnyHasAttrs| {
+            let cfg_options = OnceCell::new();
+            expand_cfg_attr(
+                node.attrs(),
+                || cfg_options.get_or_init(|| def_crate.cfg_options(db)),
+                |attr, _, _, _| {
+                    if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr
+                        && name.text() == "rustc_macro_transparency"
+                        && let Some(value) = value.and_then(ast::String::cast)
+                        && let Ok(value) = value.value()
+                    {
+                        match &*value {
+                            "transparent" => ControlFlow::Break(Transparency::Transparent),
+                            "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
+                            "opaque" => ControlFlow::Break(Transparency::Opaque),
+                            _ => ControlFlow::Continue(()),
+                        }
+                    } else {
+                        ControlFlow::Continue(())
+                    }
                 },
-                _ => None,
-            }
+            )
         };
         let ctx_edition = |ctx: SyntaxContext| {
             if ctx.is_root() {
@@ -136,7 +139,8 @@ pub(crate) fn expander(
                         "expected a token tree".into(),
                     )),
                 },
-                transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+                transparency(ast::AnyHasAttrs::from(macro_rules))
+                    .unwrap_or(Transparency::SemiTransparent),
             ),
             ast::Macro::MacroDef(macro_def) => (
                 match macro_def.body() {
@@ -164,7 +168,7 @@ pub(crate) fn expander(
                         "expected a token tree".into(),
                     )),
                 },
-                transparency(&macro_def).unwrap_or(Transparency::Opaque),
+                transparency(macro_def.into()).unwrap_or(Transparency::Opaque),
             ),
         };
         let edition = ctx_edition(match id.file_id {
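
The rewritten `transparency` closure above drives the attribute walk through a `ControlFlow`-returning callback and only computes the crate's `cfg_options` lazily via `OnceCell` if an attribute is actually visited. A standalone sketch of that short-circuiting shape, with plain `(name, value)` string pairs standing in for parsed attributes; the `Transparency` variant names match the diff, everything else is illustrative.

```rust
use std::cell::OnceCell;
use std::ops::ControlFlow;

#[derive(Debug, PartialEq)]
enum Transparency {
    Transparent,
    SemiTransparent,
    Opaque,
}

/// Visit attributes in order, stopping at the first one the callback breaks on.
fn find_map_attr<T>(
    attrs: &[(&str, &str)],
    mut f: impl FnMut(&str, &str) -> ControlFlow<T>,
) -> Option<T> {
    for &(name, value) in attrs {
        if let ControlFlow::Break(result) = f(name, value) {
            return Some(result);
        }
    }
    None
}

fn main() {
    // Expensive context (think cfg options) is only computed if it is actually needed.
    let expensive_context: OnceCell<Vec<String>> = OnceCell::new();

    let attrs = [("doc", "hidden"), ("rustc_macro_transparency", "semitransparent")];
    let transparency = find_map_attr(&attrs, |name, value| {
        let _ctx = expensive_context.get_or_init(|| vec!["unix".to_owned()]);
        if name == "rustc_macro_transparency" {
            match value {
                "transparent" => ControlFlow::Break(Transparency::Transparent),
                "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
                "opaque" => ControlFlow::Break(Transparency::Opaque),
                _ => ControlFlow::Continue(()),
            }
        } else {
            ControlFlow::Continue(())
        }
    });
    assert_eq!(transparency.unwrap_or(Transparency::SemiTransparent), Transparency::SemiTransparent);
}
```
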
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
index a7f3e27..fe557d6 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -55,30 +55,6 @@ fn from(value: FilePosition) -> Self {
     }
 }
 
-impl FilePositionWrapper<span::FileId> {
-    pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
-        FilePositionWrapper {
-            file_id: EditionedFileId::new(db, self.file_id, edition),
-            offset: self.offset,
-        }
-    }
-}
-
-impl FileRangeWrapper<span::FileId> {
-    pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
-        FileRangeWrapper {
-            file_id: EditionedFileId::new(db, self.file_id, edition),
-            range: self.range,
-        }
-    }
-}
-
-impl<T> InFileWrapper<span::FileId, T> {
-    pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
-        InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
-    }
-}
-
 impl HirFileRange {
     pub fn file_range(self) -> Option<FileRange> {
         Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
@@ -407,7 +383,7 @@ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
 
                 // Fall back to whole macro call.
                 let loc = db.lookup_intern_macro_call(mac_file);
-                loc.kind.original_call_range(db)
+                loc.kind.original_call_range(db, loc.krate)
             }
         }
     }
@@ -453,7 +429,10 @@ pub fn original_node_file_range(
                     Some(it) => it,
                     None => {
                         let loc = db.lookup_intern_macro_call(mac_file);
-                        (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
+                        (
+                            loc.kind.original_call_range(db, loc.krate),
+                            SyntaxContext::root(loc.def.edition),
+                        )
                     }
                 }
             }
@@ -468,7 +447,7 @@ pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> Fil
                     Some(it) => it,
                     _ => {
                         let loc = db.lookup_intern_macro_call(mac_file);
-                        loc.kind.original_call_range(db)
+                        loc.kind.original_call_range(db, loc.krate)
                     }
                 }
             }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index fe77e15..876d870 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -523,6 +523,7 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) {
             fixups.remove,
             span_map.span_for_range(TextRange::empty(0.into())),
             DocCommentDesugarMode::Mbe,
+            |_, _| (true, Vec::new()),
         );
 
         let actual = format!("{tt}\n");
@@ -535,7 +536,6 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) {
             &tt,
             syntax_bridge::TopEntryPoint::MacroItems,
             &mut |_| parser::Edition::CURRENT,
-            parser::Edition::CURRENT,
         );
         assert!(
             parse.errors().is_empty(),
@@ -698,7 +698,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {a . __ra_fixup ;}
+fn foo () {a .__ra_fixup ;}
 "#]],
         )
     }
@@ -713,7 +713,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {a . __ra_fixup ; bar () ;}
+fn foo () {a .__ra_fixup ; bar () ;}
 "#]],
         )
     }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 77f61dd8..157a531 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -25,18 +25,17 @@
 mod fixup;
 mod prettify_macro_expansion_;
 
-use attrs::collect_attrs;
-use rustc_hash::FxHashMap;
 use salsa::plumbing::{AsId, FromId};
 use stdx::TupleExt;
+use thin_vec::ThinVec;
 use triomphe::Arc;
 
 use core::fmt;
-use std::hash::Hash;
+use std::{hash::Hash, ops};
 
 use base_db::Crate;
 use either::Either;
-use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
+use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext};
 use syntax::{
     SyntaxNode, SyntaxToken, TextRange, TextSize,
     ast::{self, AstNode},
@@ -317,9 +316,6 @@ pub enum MacroCallKind {
     Derive {
         ast_id: AstId<ast::Adt>,
         /// Syntactical index of the invoking `#[derive]` attribute.
-        ///
-        /// Outer attributes are counted first, then inner attributes. This does not support
-        /// out-of-line modules, which may have attributes spread across 2 files!
         derive_attr_index: AttrId,
         /// Index of the derive macro in the derive attribute
         derive_index: u32,
@@ -329,17 +325,68 @@ pub enum MacroCallKind {
     },
     Attr {
         ast_id: AstId<ast::Item>,
-        // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
-        // but we need to fix the `cfg_attr` handling first.
+        // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`.
         attr_args: Option<Arc<tt::TopSubtree>>,
-        /// Syntactical index of the invoking `#[attribute]`.
+        /// This contains the list of all *active* attributes (derives and attr macros) on this item
+        /// up to and including the invoking attribute. You can retrieve the [`AttrId`] of the
+        /// invoking attribute by calling [`invoc_attr()`] on this.
         ///
-        /// Outer attributes are counted first, then inner attributes. This does not support
-        /// out-of-line modules, which may have attributes spread across 2 files!
-        invoc_attr_index: AttrId,
+        /// The macro should not see the attributes here.
+        ///
+        /// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr
+        censored_attr_ids: AttrMacroAttrIds,
     },
 }
 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr);
+
+impl AttrMacroAttrIds {
+    #[inline]
+    pub fn from_one(id: AttrId) -> Self {
+        Self(AttrMacroAttrIdsRepr::One(id))
+    }
+
+    #[inline]
+    pub fn from_many(ids: &[AttrId]) -> Self {
+        if let &[id] = ids {
+            Self(AttrMacroAttrIdsRepr::One(id))
+        } else {
+            Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect()))
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+enum AttrMacroAttrIdsRepr {
+    One(AttrId),
+    ManyDerives(ThinVec<AttrId>),
+}
+
+impl ops::Deref for AttrMacroAttrIds {
+    type Target = [AttrId];
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        match &self.0 {
+            AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one),
+            AttrMacroAttrIdsRepr::ManyDerives(many) => many,
+        }
+    }
+}
+
+impl AttrMacroAttrIds {
+    #[inline]
+    pub fn invoc_attr(&self) -> AttrId {
+        match &self.0 {
+            AttrMacroAttrIdsRepr::One(it) => *it,
+            AttrMacroAttrIdsRepr::ManyDerives(it) => {
+                *it.last().expect("should always have at least one `AttrId`")
+            }
+        }
+    }
+}
+
 impl MacroCallKind {
     pub(crate) fn call_style(&self) -> MacroCallStyle {
         match self {
@@ -597,34 +644,20 @@ pub fn is_env_or_option_env(&self) -> bool {
 
 impl MacroCallLoc {
     pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
-        match self.kind {
+        match &self.kind {
             MacroCallKind::FnLike { ast_id, .. } => {
                 ast_id.with_value(ast_id.to_node(db).syntax().clone())
             }
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: handle `cfg_attr`
-                ast_id.with_value(ast_id.to_node(db)).map(|it| {
-                    collect_attrs(&it)
-                        .nth(derive_attr_index.ast_index())
-                        .and_then(|it| match it.1 {
-                            Either::Left(attr) => Some(attr.syntax().clone()),
-                            Either::Right(_) => None,
-                        })
-                        .unwrap_or_else(|| it.syntax().clone())
-                })
+                let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
+                ast_id.with_value(attr.syntax().clone())
             }
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+            MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
                 if self.def.is_attribute_derive() {
-                    // FIXME: handle `cfg_attr`
-                    ast_id.with_value(ast_id.to_node(db)).map(|it| {
-                        collect_attrs(&it)
-                            .nth(invoc_attr_index.ast_index())
-                            .and_then(|it| match it.1 {
-                                Either::Left(attr) => Some(attr.syntax().clone()),
-                                Either::Right(_) => None,
-                            })
-                            .unwrap_or_else(|| it.syntax().clone())
-                    })
+                    let (attr, _, _, _) =
+                        attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
+                    ast_id.with_value(attr.syntax().clone())
                 } else {
                     ast_id.with_value(ast_id.to_node(db).syntax().clone())
                 }
@@ -729,7 +762,7 @@ pub fn original_call_range_with_input(self, db: &dyn ExpandDatabase) -> FileRang
     /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
     /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
     /// attribute's range, and derives get only the specific derive that is being referred to.
-    pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
+    pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange {
         let mut kind = self;
         let file_id = loop {
             match kind.file_id() {
@@ -751,24 +784,11 @@ pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
             }
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: should be the range of the macro name, not the whole derive
-                // FIXME: handle `cfg_attr`
-                collect_attrs(&ast_id.to_node(db))
-                    .nth(derive_attr_index.ast_index())
-                    .expect("missing derive")
-                    .1
-                    .expect_left("derive is a doc comment?")
-                    .syntax()
-                    .text_range()
+                derive_attr_index.find_attr_range(db, krate, ast_id).2
             }
             // FIXME: handle `cfg_attr`
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-                collect_attrs(&ast_id.to_node(db))
-                    .nth(invoc_attr_index.ast_index())
-                    .expect("missing attribute")
-                    .1
-                    .expect_left("attribute macro is a doc comment?")
-                    .syntax()
-                    .text_range()
+            MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
+                attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2
             }
         };
 
@@ -887,7 +907,8 @@ pub fn map_range_up_once(
         let span = self.exp_map.span_at(token.start());
         match &self.arg_map {
             SpanMap::RealSpanMap(_) => {
-                let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
+                let file_id =
+                    EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into();
                 let anchor_offset =
                     db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
                 InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@@ -943,7 +964,7 @@ pub fn map_node_range_up_rooted(
         start = start.min(span.range.start());
         end = end.max(span.range.end());
     }
-    let file_id = EditionedFileId::from_span(db, anchor.file_id);
+    let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
     let anchor_offset =
         db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
     Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
@@ -969,36 +990,12 @@ pub fn map_node_range_up(
         start = start.min(span.range.start());
         end = end.max(span.range.end());
     }
-    let file_id = EditionedFileId::from_span(db, anchor.file_id);
+    let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
     let anchor_offset =
         db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
     Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
 }
 
-/// Maps up the text range out of the expansion hierarchy back into the original file its from.
-/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
-pub fn map_node_range_up_aggregated(
-    db: &dyn ExpandDatabase,
-    exp_map: &ExpansionSpanMap,
-    range: TextRange,
-) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> {
-    let mut map = FxHashMap::default();
-    for span in exp_map.spans_for_range(range) {
-        let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
-        *range = TextRange::new(
-            range.start().min(span.range.start()),
-            range.end().max(span.range.end()),
-        );
-    }
-    for ((anchor, _), range) in &mut map {
-        let file_id = EditionedFileId::from_span(db, anchor.file_id);
-        let anchor_offset =
-            db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
-        *range += anchor_offset;
-    }
-    map
-}
-
 /// Looks up the span at the given offset.
 pub fn span_for_offset(
     db: &dyn ExpandDatabase,
@@ -1006,7 +1003,7 @@ pub fn span_for_offset(
     offset: TextSize,
 ) -> (FileRange, SyntaxContext) {
     let span = exp_map.span_at(offset);
-    let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
+    let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id);
     let anchor_offset =
         db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
     (FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
@@ -1076,7 +1073,7 @@ pub fn from_call_site(call: &ast::MacroCall) -> ExpandTo {
     }
 }
 
-intern::impl_internable!(ModPath, attrs::AttrInput);
+intern::impl_internable!(ModPath);
 
 #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
 #[doc(alias = "MacroFileId")]
@@ -1139,6 +1136,14 @@ pub fn file_id(self) -> Option<EditionedFileId> {
             HirFileId::MacroFile(_) => None,
         }
     }
+
+    #[inline]
+    pub fn krate(self, db: &dyn ExpandDatabase) -> Crate {
+        match self {
+            HirFileId::FileId(it) => it.krate(db),
+            HirFileId::MacroFile(it) => it.loc(db).krate,
+        }
+    }
 }
 
 impl PartialEq<EditionedFileId> for HirFileId {
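
The new `AttrMacroAttrIds` above keeps the common single-attribute case unboxed and only allocates for the many-derives case; `Deref<Target = [AttrId]>` lets callers treat both shapes as a slice, and `invoc_attr()` always returns the last (i.e. invoking) id. A standalone sketch of the same one-or-many layout, with `AttrId` reduced to a `u32` newtype and `Vec` standing in for `ThinVec`; the type and method names here are simplified stand-ins.

```rust
use std::ops::Deref;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AttrId(u32);

#[derive(Debug, Clone)]
enum OneOrMany {
    One(AttrId),
    Many(Vec<AttrId>), // the real type uses ThinVec to keep the enum small
}

#[derive(Debug, Clone)]
struct AttrIds(OneOrMany);

impl AttrIds {
    fn from_many(ids: &[AttrId]) -> Self {
        match ids {
            &[id] => AttrIds(OneOrMany::One(id)),
            _ => AttrIds(OneOrMany::Many(ids.to_vec())),
        }
    }

    /// The invoking attribute is always the last one in the list.
    fn invoc_attr(&self) -> AttrId {
        *self.last().expect("should always have at least one AttrId")
    }
}

impl Deref for AttrIds {
    type Target = [AttrId];
    fn deref(&self) -> &[AttrId] {
        match &self.0 {
            OneOrMany::One(one) => std::slice::from_ref(one),
            OneOrMany::Many(many) => many,
        }
    }
}

fn main() {
    let single = AttrIds::from_many(&[AttrId(3)]);
    assert_eq!(single.len(), 1);
    assert_eq!(single.invoc_attr(), AttrId(3));

    let many = AttrIds::from_many(&[AttrId(0), AttrId(2), AttrId(5)]);
    assert_eq!(&many[..], &[AttrId(0), AttrId(2), AttrId(5)]);
    assert_eq!(many.invoc_attr(), AttrId(5));
}
```
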
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index d84d978..e9805e3 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -2,7 +2,7 @@
 
 use std::{
     fmt::{self, Display as _},
-    iter,
+    iter::{self, Peekable},
 };
 
 use crate::{
@@ -12,10 +12,11 @@
     tt,
 };
 use base_db::Crate;
-use intern::sym;
+use intern::{Symbol, sym};
+use parser::T;
 use smallvec::SmallVec;
 use span::{Edition, SyntaxContext};
-use syntax::{AstNode, ast};
+use syntax::{AstNode, SyntaxToken, ast};
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct ModPath {
@@ -64,6 +65,58 @@ pub const fn from_kind(kind: PathKind) -> ModPath {
         ModPath { kind, segments: SmallVec::new_const() }
     }
 
+    pub fn from_tokens(
+        db: &dyn ExpandDatabase,
+        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
+        is_abs: bool,
+        segments: impl Iterator<Item = SyntaxToken>,
+    ) -> Option<ModPath> {
+        let mut segments = segments.peekable();
+        let mut result = SmallVec::new_const();
+        let path_kind = if is_abs {
+            PathKind::Abs
+        } else {
+            let first = segments.next()?;
+            match first.kind() {
+                T![crate] => PathKind::Crate,
+                T![self] => PathKind::Super(handle_super(&mut segments)),
+                T![super] => PathKind::Super(1 + handle_super(&mut segments)),
+                T![ident] => {
+                    let first_text = first.text();
+                    if first_text == "$crate" {
+                        let ctxt = span_for_range(first.text_range());
+                        resolve_crate_root(db, ctxt)
+                            .map(PathKind::DollarCrate)
+                            .unwrap_or(PathKind::Crate)
+                    } else {
+                        result.push(Name::new_symbol_root(Symbol::intern(first_text)));
+                        PathKind::Plain
+                    }
+                }
+                _ => return None,
+            }
+        };
+        for segment in segments {
+            if segment.kind() != T![ident] {
+                return None;
+            }
+            result.push(Name::new_symbol_root(Symbol::intern(segment.text())));
+        }
+        if result.is_empty() {
+            return None;
+        }
+        result.shrink_to_fit();
+        return Some(ModPath { kind: path_kind, segments: result });
+
+        fn handle_super(segments: &mut Peekable<impl Iterator<Item = SyntaxToken>>) -> u8 {
+            let mut result = 0;
+            while segments.next_if(|it| it.kind() == T![super]).is_some() {
+                result += 1;
+            }
+            result
+        }
+    }
+
     pub fn segments(&self) -> &[Name] {
         &self.segments
     }
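
`ModPath::from_tokens` above classifies the first segment (`crate`, `self`, `super`, `$crate`, or a plain identifier) and counts consecutive leading `super` tokens with `Peekable::next_if`. A standalone sketch of that prefix handling over plain string tokens; it omits `$crate` resolution and token-kind checks, and all names are illustrative rather than rust-analyzer API.

```rust
use std::iter::Peekable;

#[derive(Debug, PartialEq)]
enum PathKind {
    Plain,
    Crate,
    /// Number of `super` segments, e.g. `super::super::foo` is `Super(2)`.
    Super(u8),
}

fn parse_path(segments: &[&str]) -> Option<(PathKind, Vec<String>)> {
    let mut segments = segments.iter().copied().peekable();
    let mut names = Vec::new();
    let first = segments.next()?;
    let kind = match first {
        "crate" => PathKind::Crate,
        "self" => PathKind::Super(count_super(&mut segments)),
        "super" => PathKind::Super(1 + count_super(&mut segments)),
        ident => {
            names.push(ident.to_owned());
            PathKind::Plain
        }
    };
    names.extend(segments.map(str::to_owned));
    if names.is_empty() {
        return None;
    }
    Some((kind, names))
}

/// Consume consecutive `super` tokens and return how many were seen.
fn count_super<'a>(segments: &mut Peekable<impl Iterator<Item = &'a str>>) -> u8 {
    let mut n = 0;
    while segments.next_if(|&s| s == "super").is_some() {
        n += 1;
    }
    n
}

fn main() {
    assert_eq!(
        parse_path(&["super", "super", "foo"]),
        Some((PathKind::Super(2), vec!["foo".to_owned()]))
    );
    assert_eq!(
        parse_path(&["crate", "bar", "baz"]),
        Some((PathKind::Crate, vec!["bar".to_owned(), "baz".to_owned()]))
    );
    assert_eq!(parse_path(&["self"]), None); // a bare `self` path has no segments
}
```
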
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
index e5a778a..8b0c0d7 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
@@ -1,13 +1,12 @@
 //! Span maps for real files and macro expansions.
 
 use span::{Span, SyntaxContext};
-use stdx::TupleExt;
 use syntax::{AstNode, TextRange, ast};
 use triomphe::Arc;
 
 pub use span::RealSpanMap;
 
-use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
+use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
 
 pub type ExpansionSpanMap = span::SpanMap<SyntaxContext>;
 
@@ -110,26 +109,24 @@ pub(crate) fn real_span_map(
     // them anchors too, but only if they have no attributes attached, as those might be proc-macros
     // and using different anchors inside of them will prevent spans from being joinable.
     tree.items().for_each(|item| match &item {
-        ast::Item::ExternBlock(it)
-            if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
-        {
+        ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => {
             if let Some(extern_item_list) = it.extern_item_list() {
                 pairs.extend(
                     extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
                 );
             }
         }
-        ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+        ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => {
             if let Some(assoc_item_list) = it.assoc_item_list() {
                 pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
             }
         }
-        ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+        ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => {
             if let Some(item_list) = it.item_list() {
                 pairs.extend(item_list.items().map(item_to_entry));
             }
         }
-        ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+        ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => {
             if let Some(assoc_item_list) = it.assoc_item_list() {
                 pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
             }
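
The anchor condition in `real_span_map` now simply asks whether the item has any attributes at all (`.next().is_none()` on the attribute iterator), because an attributed item might be a proc-macro input and must not become a span anchor. A tiny standalone sketch of that emptiness check; the function name and attribute strings are illustrative.

```rust
/// An item qualifies as a span anchor only if it has no attributes attached,
/// since any attribute might turn out to be a proc macro that rewrites its body.
fn can_be_anchor<'a>(mut attrs: impl Iterator<Item = &'a str>) -> bool {
    // `.next().is_none()` stops at the first attribute; the rest never get looked at.
    attrs.next().is_none()
}

fn main() {
    assert!(can_be_anchor(std::iter::empty::<&str>()));
    assert!(!can_be_anchor(["cfg_attr(test, derive(Debug))"].into_iter()));
}
```
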
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index 8adf95b..c60ecef 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -53,6 +53,7 @@
 base-db.workspace = true
 syntax.workspace = true
 span.workspace = true
+thin-vec = "0.2.14"
 
 [dev-dependencies]
 expect-test = "1.5.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
index d21108f..0a36c0e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -5,7 +5,7 @@
 
 use std::fmt;
 
-use hir_def::{TraitId, TypeAliasId, lang_item::LangItem};
+use hir_def::{TraitId, TypeAliasId};
 use rustc_type_ir::inherent::{IntoKind, Ty as _};
 use tracing::debug;
 use triomphe::Arc;
@@ -38,7 +38,7 @@ pub fn autoderef<'db>(
     env: Arc<TraitEnvironment<'db>>,
     ty: Canonical<'db, Ty<'db>>,
 ) -> impl Iterator<Item = Ty<'db>> + use<'db> {
-    let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+    let interner = DbInterner::new_with(db, env.krate);
     let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
     let (ty, _) = infcx.instantiate_canonical(&ty);
     let autoderef = Autoderef::new(&infcx, &env, ty);
@@ -301,36 +301,28 @@ fn interner(&self) -> DbInterner<'db> {
         self.infcx().interner
     }
 
-    #[inline]
-    fn db(&self) -> &'db dyn HirDatabase {
-        self.interner().db
-    }
-
     fn autoderef_traits(&mut self) -> Option<AutoderefTraits> {
+        let lang_items = self.interner().lang_items();
         match &mut self.traits {
             Some(it) => Some(*it),
             None => {
                 let traits = if self.use_receiver_trait {
                     (|| {
                         Some(AutoderefTraits {
-                            trait_: LangItem::Receiver
-                                .resolve_trait(self.db(), self.env().krate)?,
-                            trait_target: LangItem::ReceiverTarget
-                                .resolve_type_alias(self.db(), self.env().krate)?,
+                            trait_: lang_items.Receiver?,
+                            trait_target: lang_items.ReceiverTarget?,
                         })
                     })()
                     .or_else(|| {
                         Some(AutoderefTraits {
-                            trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
-                            trait_target: LangItem::DerefTarget
-                                .resolve_type_alias(self.db(), self.env().krate)?,
+                            trait_: lang_items.Deref?,
+                            trait_target: lang_items.DerefTarget?,
                         })
                     })?
                 } else {
                     AutoderefTraits {
-                        trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
-                        trait_target: LangItem::DerefTarget
-                            .resolve_type_alias(self.db(), self.env().krate)?,
+                        trait_: lang_items.Deref?,
+                        trait_target: lang_items.DerefTarget?,
                     }
                 };
                 Some(*self.traits.insert(traits))
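
Autoderef (and several call sites further down) now reads lang items off a `LangItems` struct resolved once per crate instead of calling `LangItem::X.resolve_trait(db, krate)` at every use. A standalone sketch of that "resolve once, then just read fields" shape, with a toy name table in place of the real lang-item query; field names are snake_cased here, whereas the real `LangItems` names its fields after the lang items themselves.

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TraitId(u32);

/// Lang items resolved once per crate and then shared by reference.
#[derive(Debug, Default)]
struct LangItems {
    deref: Option<TraitId>,
    receiver: Option<TraitId>,
}

impl LangItems {
    fn resolve(by_name: &HashMap<&str, TraitId>) -> Self {
        LangItems {
            deref: by_name.get("deref").copied(),
            receiver: by_name.get("receiver").copied(),
        }
    }
}

/// Call sites read a field (and use `?` or `or`) instead of re-running a lookup query.
fn autoderef_trait(lang_items: &LangItems) -> Option<TraitId> {
    lang_items.receiver.or(lang_items.deref)
}

fn main() {
    let mut defs = HashMap::new();
    defs.insert("deref", TraitId(10));
    let lang_items = LangItems::resolve(&defs);
    // No `Receiver` lang item in this toy crate, so we fall back to `Deref`.
    assert_eq!(autoderef_trait(&lang_items), Some(TraitId(10)));
}
```
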
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 61f29b4..65250f9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -6,6 +6,7 @@
 use base_db::Crate;
 use hir_def::{
     ConstId, EnumVariantId, GeneralConstId, StaticId,
+    attrs::AttrFlags,
     expr_store::Body,
     hir::{Expr, ExprId},
     type_ref::LiteralConstRef,
@@ -83,7 +84,7 @@ pub fn intern_const_ref<'a>(
     ty: Ty<'a>,
     krate: Crate,
 ) -> Const<'a> {
-    let interner = DbInterner::new_with(db, Some(krate), None);
+    let interner = DbInterner::new_no_crate(db);
     let layout = db.layout_of_ty(ty, TraitEnvironment::empty(krate));
     let kind = match value {
         LiteralConstRef::Int(i) => {
@@ -128,7 +129,7 @@ pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option<u128>, krate: Cr
     intern_const_ref(
         db,
         &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
-        Ty::new_uint(DbInterner::new_with(db, Some(krate), None), rustc_type_ir::UintTy::Usize),
+        Ty::new_uint(DbInterner::new_no_crate(db), rustc_type_ir::UintTy::Usize),
         krate,
     )
 }
@@ -183,7 +184,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
     db: &'db dyn HirDatabase,
     variant_id: EnumVariantId,
 ) -> Result<i128, ConstEvalError<'db>> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let def = variant_id.into();
     let body = db.body(def);
     let loc = variant_id.lookup(db);
@@ -200,7 +201,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
         return Ok(value);
     }
 
-    let repr = db.enum_signature(loc.parent).repr;
+    let repr = AttrFlags::repr(db, loc.parent.into());
     let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
 
     let mir_body = db.monomorphized_mir_body(
@@ -292,7 +293,7 @@ pub(crate) fn const_eval_static_query<'db>(
     db: &'db dyn HirDatabase,
     def: StaticId,
 ) -> Result<Const<'db>, ConstEvalError<'db>> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let body = db.monomorphized_mir_body(
         def.into(),
         GenericArgs::new_from_iter(interner, []),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 6095250..f251476 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -123,7 +123,7 @@ fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
 
 fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError<'_>> {
     let _tracing = setup_tracing();
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let module_id = db.module_for_file(file_id.file_id(db));
     let def_map = module_id.def_map(db);
     let scope = &def_map[module_id.local_id].scope;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 40e58aa..df05871 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -12,7 +12,7 @@
 use triomphe::Arc;
 
 use crate::{
-    ImplTraitId, InferenceResult, TraitEnvironment, TyDefId, ValueTyDefId,
+    ImplTraitId, TraitEnvironment, TyDefId, ValueTyDefId,
     consteval::ConstEvalError,
     dyn_compatibility::DynCompatibilityViolation,
     layout::{Layout, LayoutError},
@@ -23,10 +23,6 @@
 
 #[query_group::query_group]
 pub trait HirDatabase: DefDatabase + std::fmt::Debug {
-    #[salsa::invoke(crate::infer::infer_query)]
-    #[salsa::cycle(cycle_result = crate::infer::infer_cycle_result)]
-    fn infer<'db>(&'db self, def: DefWithBodyId) -> Arc<InferenceResult<'db>>;
-
     // region:mir
 
     // FIXME: Collapse `mir_body_for_closure` into `mir_body`
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index f6992df..79dc6e3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -17,8 +17,8 @@
 
 use hir_def::{
     AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
-    ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat,
-    item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
+    ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags,
+    db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
 };
 use hir_expand::{
     HirFileId,
@@ -201,7 +201,7 @@ fn validate_func(&mut self, func: FunctionId) {
 
             // Don't run the lint on extern "[not Rust]" fn items with the
             // #[no_mangle] attribute.
-            let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists();
+            let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE);
             if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
                 cov_mark::hit!(extern_func_no_mangle_ignored);
             } else {
@@ -563,7 +563,7 @@ fn validate_static(&mut self, static_id: StaticId) {
             cov_mark::hit!(extern_static_incorrect_case_ignored);
             return;
         }
-        if self.db.attrs(static_id.into()).by_key(sym::no_mangle).exists() {
+        if AttrFlags::query(self.db, static_id.into()).contains(AttrFlags::NO_MANGLE) {
             cov_mark::hit!(no_mangle_static_incorrect_case_ignored);
             return;
         }
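
`decl_check` (and `pat_analysis` further down) now tests boolean attributes such as `#[no_mangle]` and `#[non_exhaustive]` through a precomputed `AttrFlags` bitset rather than walking the attribute list on every query. A standalone sketch of that flags-computed-once pattern; the bit constants are hand-rolled so the sketch has no dependencies, and the flag names are the only part taken from the diff.

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AttrFlags(u32);

impl AttrFlags {
    const EMPTY: AttrFlags = AttrFlags(0);
    const NO_MANGLE: AttrFlags = AttrFlags(1 << 0);
    const NON_EXHAUSTIVE: AttrFlags = AttrFlags(1 << 1);

    fn contains(self, other: AttrFlags) -> bool {
        self.0 & other.0 == other.0
    }

    fn insert(&mut self, other: AttrFlags) {
        self.0 |= other.0;
    }

    /// Computed once per item (behind a query in rust-analyzer); afterwards every
    /// lint just tests bits instead of re-scanning the attributes.
    fn from_attr_names<'a>(attrs: impl Iterator<Item = &'a str>) -> AttrFlags {
        let mut flags = AttrFlags::EMPTY;
        for attr in attrs {
            match attr {
                "no_mangle" => flags.insert(AttrFlags::NO_MANGLE),
                "non_exhaustive" => flags.insert(AttrFlags::NON_EXHAUSTIVE),
                _ => {}
            }
        }
        flags
    }
}

fn main() {
    let flags = AttrFlags::from_attr_names(["inline", "no_mangle"].into_iter());
    assert!(flags.contains(AttrFlags::NO_MANGLE));
    assert!(!flags.contains(AttrFlags::NON_EXHAUSTIVE));
}
```
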
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index 0eca0c0..ffbcea4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -8,7 +8,7 @@
 use either::Either;
 use hir_def::{
     AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
-    lang_item::LangItem,
+    lang_item::LangItems,
     resolver::{HasResolver, ValueNs},
 };
 use intern::sym;
@@ -76,10 +76,10 @@ pub fn collect(
         validate_lints: bool,
     ) -> Vec<BodyValidationDiagnostic> {
         let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
-        let infer = db.infer(owner);
+        let infer = InferenceResult::for_body(db, owner);
         let body = db.body(owner);
         let env = db.trait_environment_for_body(owner);
-        let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+        let interner = DbInterner::new_with(db, env.krate);
         let infcx =
             interner.infer_ctxt().build(TypingMode::typeck_for_body(interner, owner.into()));
         let mut validator = ExprValidator {
@@ -99,7 +99,7 @@ pub fn collect(
 struct ExprValidator<'db> {
     owner: DefWithBodyId,
     body: Arc<Body>,
-    infer: Arc<InferenceResult<'db>>,
+    infer: &'db InferenceResult<'db>,
     env: Arc<TraitEnvironment<'db>>,
     diagnostics: Vec<BodyValidationDiagnostic>,
     validate_lints: bool,
@@ -124,7 +124,7 @@ fn validate_body(&mut self) {
 
         for (id, expr) in body.exprs() {
             if let Some((variant, missed_fields, true)) =
-                record_literal_missing_fields(db, &self.infer, id, expr)
+                record_literal_missing_fields(db, self.infer, id, expr)
             {
                 self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
                     record: Either::Left(id),
@@ -155,7 +155,7 @@ fn validate_body(&mut self) {
 
         for (id, pat) in body.pats() {
             if let Some((variant, missed_fields, true)) =
-                record_pattern_missing_fields(db, &self.infer, id, pat)
+                record_pattern_missing_fields(db, self.infer, id, pat)
             {
                 self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
                     record: Either::Right(id),
@@ -187,7 +187,7 @@ fn validate_call(
             };
 
             let checker = filter_map_next_checker.get_or_insert_with(|| {
-                FilterMapNextChecker::new(&self.owner.resolver(self.db()), self.db())
+                FilterMapNextChecker::new(self.infcx.interner.lang_items(), self.db())
             });
 
             if checker.check(call_id, receiver, &callee).is_some() {
@@ -240,7 +240,7 @@ fn validate_match(&mut self, match_expr: ExprId, scrutinee_expr: ExprId, arms: &
                     .as_reference()
                     .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
                     .unwrap_or(false))
-                && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer)
+                && types_of_subpatterns_do_match(arm.pat, &self.body, self.infer)
             {
                 // If we had a NotUsefulMatchArm diagnostic, we could
                 // check the usefulness of each pattern as we added it
@@ -388,7 +388,7 @@ fn lower_pattern<'a>(
         pat: PatId,
         have_errors: &mut bool,
     ) -> DeconstructedPat<'a, 'db> {
-        let mut patcx = match_check::PatCtxt::new(self.db(), &self.infer, &self.body);
+        let mut patcx = match_check::PatCtxt::new(self.db(), self.infer, &self.body);
         let pattern = patcx.lower_pattern(pat);
         let pattern = cx.lower_pat(&pattern);
         if !patcx.errors.is_empty() {
@@ -497,11 +497,9 @@ struct FilterMapNextChecker<'db> {
 }
 
 impl<'db> FilterMapNextChecker<'db> {
-    fn new(resolver: &hir_def::resolver::Resolver<'db>, db: &'db dyn HirDatabase) -> Self {
+    fn new(lang_items: &'db LangItems, db: &'db dyn HirDatabase) -> Self {
         // Find and store the FunctionIds for Iterator::filter_map and Iterator::next
-        let (next_function_id, filter_map_function_id) = match LangItem::IteratorNext
-            .resolve_function(db, resolver.krate())
-        {
+        let (next_function_id, filter_map_function_id) = match lang_items.IteratorNext {
             Some(next_function_id) => (
                 Some(next_function_id),
                 match next_function_id.lookup(db).container {
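
`ExprValidator` now holds `&'db InferenceResult<'db>` instead of an `Arc<InferenceResult>`: the inference result is owned by the database for the whole `'db` lifetime, so bumping a refcount per validator buys nothing. A standalone sketch of that Arc-to-borrow change under the assumption that the owner outlives the borrower; apart from the struct names taken from the diff, everything here is a toy stand-in.

```rust
/// Toy stand-in: the "database" owns analysis results for its whole lifetime.
struct InferenceResult {
    expr_count: usize,
}

struct Database {
    inference: InferenceResult,
}

impl Database {
    /// Analogous to a query that hands out a reference tied to the database
    /// lifetime instead of cloning an `Arc`.
    fn infer(&self) -> &InferenceResult {
        &self.inference
    }
}

/// The validator borrows instead of ref-counting; no clone, no drop bookkeeping.
struct ExprValidator<'db> {
    infer: &'db InferenceResult,
    diagnostics: Vec<String>,
}

impl<'db> ExprValidator<'db> {
    fn new(db: &'db Database) -> Self {
        ExprValidator { infer: db.infer(), diagnostics: Vec::new() }
    }

    fn validate(&mut self) {
        if self.infer.expr_count == 0 {
            self.diagnostics.push("empty body".to_owned());
        }
    }
}

fn main() {
    let db = Database { inference: InferenceResult { expr_count: 0 } };
    let mut validator = ExprValidator::new(&db);
    validator.validate();
    assert_eq!(validator.diagnostics, vec!["empty body".to_owned()]);
}
```
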
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index fb942e3..c70c6b6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -2,7 +2,9 @@
 
 use std::{cell::LazyCell, fmt};
 
-use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
+use hir_def::{
+    EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags,
+};
 use intern::sym;
 use rustc_pattern_analysis::{
     IndexVec, PatCx, PrivateUninhabitedField,
@@ -118,7 +120,7 @@ fn is_uninhabited(&self, ty: Ty<'db>) -> bool {
     /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
     fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
         let is_local = adt.krate(self.db) == self.module.krate();
-        !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists()
+        !is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE)
     }
 
     fn variant_id_for_adt(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 53524d6..6160962 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -42,7 +42,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafe
 
     let mut res = MissingUnsafeResult { fn_is_unsafe: is_unsafe, ..MissingUnsafeResult::default() };
     let body = db.body(def);
-    let infer = db.infer(def);
+    let infer = InferenceResult::for_body(db, def);
     let mut callback = |diag| match diag {
         UnsafeDiagnostic::UnsafeOperation { node, inside_unsafe_block, reason } => {
             if inside_unsafe_block == InsideUnsafeBlock::No {
@@ -55,7 +55,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafe
             }
         }
     };
-    let mut visitor = UnsafeVisitor::new(db, &infer, &body, def, &mut callback);
+    let mut visitor = UnsafeVisitor::new(db, infer, &body, def, &mut callback);
     visitor.walk_expr(body.body_expr);
 
     if !is_unsafe {
@@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> {
     inside_assignment: bool,
     inside_union_destructure: bool,
     callback: &'db mut dyn FnMut(UnsafeDiagnostic),
-    def_target_features: TargetFeatures,
+    def_target_features: TargetFeatures<'db>,
     // FIXME: This needs to be the edition of the span of each call.
     edition: Edition,
     /// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
@@ -162,7 +162,7 @@ fn new(
     ) -> Self {
         let resolver = def.resolver(db);
         let def_target_features = match def {
-            DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
+            DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func),
             _ => TargetFeatures::default(),
         };
         let krate = resolver.module().krate();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index 6767bd0..c76b8dc 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -17,7 +17,7 @@
     hir::generics::{TypeOrConstParamData, TypeParamProvenance, WherePredicate},
     item_scope::ItemInNs,
     item_tree::FieldsShape,
-    lang_item::LangItem,
+    lang_item::LangItems,
     nameres::DefMap,
     signatures::VariantFields,
     type_ref::{
@@ -47,7 +47,7 @@
 use triomphe::Arc;
 
 use crate::{
-    CallableDefId, FnAbi, ImplTraitId, MemoryMap, TraitEnvironment, consteval,
+    CallableDefId, FnAbi, ImplTraitId, InferenceResult, MemoryMap, TraitEnvironment, consteval,
     db::{HirDatabase, InternedClosure, InternedCoroutine},
     generics::generics,
     layout::Layout,
@@ -61,7 +61,7 @@
         infer::{DbInternerInferExt, traits::ObligationCause},
     },
     primitive,
-    utils::{self, detect_variant_from_bytes},
+    utils::{detect_variant_from_bytes, fn_traits},
 };
 
 pub trait HirWrite: fmt::Write {
@@ -309,8 +309,7 @@ fn display_source_code<'a>(
         allow_opaque: bool,
     ) -> Result<String, DisplaySourceCodeError> {
         let mut result = String::new();
-        let interner =
-            DbInterner::new_with(db, Some(module_id.krate()), module_id.containing_block());
+        let interner = DbInterner::new_with(db, module_id.krate());
         match self.hir_fmt(&mut HirFormatter {
             db,
             interner,
@@ -392,6 +391,11 @@ pub fn edition(&self) -> Edition {
         self.display_target.edition
     }
 
+    #[inline]
+    pub fn lang_items(&self) -> &'db LangItems {
+        self.interner.lang_items()
+    }
+
     pub fn write_joined<T: HirDisplay<'db>>(
         &mut self,
         iter: impl IntoIterator<Item = T>,
@@ -540,11 +544,7 @@ pub enum ClosureStyle {
 impl<'db, T: HirDisplay<'db>> HirDisplayWrapper<'_, 'db, T> {
     pub fn write_to<F: HirWrite>(&self, f: &mut F) -> Result<(), HirDisplayError> {
         let krate = self.display_target.krate;
-        let block = match self.display_kind {
-            DisplayKind::SourceCode { target_module_id, .. } => target_module_id.containing_block(),
-            DisplayKind::Diagnostics | DisplayKind::Test => None,
-        };
-        let interner = DbInterner::new_with(self.db, Some(krate), block);
+        let interner = DbInterner::new_with(self.db, krate);
         self.t.hir_fmt(&mut HirFormatter {
             db: self.db,
             interner,
@@ -1102,7 +1102,7 @@ fn hir_fmt(
                             bounds.iter().any(|bound| match bound.skip_binder() {
                                 ExistentialPredicate::Trait(trait_ref) => {
                                     let trait_ = trait_ref.def_id.0;
-                                    fn_traits(db, trait_).any(|it| it == trait_)
+                                    fn_traits(f.lang_items()).any(|it| it == trait_)
                                 }
                                 _ => false,
                             });
@@ -1146,7 +1146,7 @@ fn hir_fmt(
                             let contains_impl_fn = bounds().any(|bound| {
                                 if let ClauseKind::Trait(trait_ref) = bound.kind().skip_binder() {
                                     let trait_ = trait_ref.def_id().0;
-                                    fn_traits(db, trait_).any(|it| it == trait_)
+                                    fn_traits(f.lang_items()).any(|it| it == trait_)
                                 } else {
                                     false
                                 }
@@ -1394,7 +1394,7 @@ fn hir_fmt(
                 if let Some(sig) = sig {
                     let sig = sig.skip_binder();
                     let InternedClosure(def, _) = db.lookup_intern_closure(id);
-                    let infer = db.infer(def);
+                    let infer = InferenceResult::for_body(db, def);
                     let (_, kind) = infer.closure_info(id);
                     match f.closure_style {
                         ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
@@ -1588,8 +1588,7 @@ fn hir_fmt(
                         ..
                     }
                     | hir_def::hir::Expr::Async { .. } => {
-                        let future_trait =
-                            LangItem::Future.resolve_trait(db, owner.module(db).krate());
+                        let future_trait = f.lang_items().Future;
                         let output = future_trait.and_then(|t| {
                             t.trait_items(db)
                                 .associated_type_by_name(&Name::new_symbol_root(sym::Output))
@@ -1799,11 +1798,6 @@ fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError>
     }
 }
 
-fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> + '_ {
-    let krate = trait_.lookup(db).container.krate();
-    utils::fn_traits(db, krate)
-}
-
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub enum SizedByDefault {
     NotSized,
@@ -1815,7 +1809,7 @@ fn is_sized_trait(self, trait_: TraitId, db: &dyn DefDatabase) -> bool {
         match self {
             Self::NotSized => false,
             Self::Sized { anchor } => {
-                let sized_trait = LangItem::Sized.resolve_trait(db, anchor);
+                let sized_trait = hir_def::lang_item::lang_items(db, anchor).Sized;
                 Some(trait_) == sized_trait
             }
         }
@@ -1868,7 +1862,7 @@ fn write_bounds_like_dyn_trait<'db>(
                     }
                 }
                 if !is_fn_trait {
-                    is_fn_trait = fn_traits(f.db, trait_).any(|it| it == trait_);
+                    is_fn_trait = fn_traits(f.lang_items()).any(|it| it == trait_);
                 }
                 if !is_fn_trait && angle_open {
                     write!(f, ">")?;
@@ -1966,7 +1960,7 @@ fn write_bounds_like_dyn_trait<'db>(
         write!(f, ">")?;
     }
     if let SizedByDefault::Sized { anchor } = default_sized {
-        let sized_trait = LangItem::Sized.resolve_trait(f.db, anchor);
+        let sized_trait = hir_def::lang_item::lang_items(f.db, anchor).Sized;
         if !is_sized {
             if !first {
                 write!(f, " + ")?;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
index aebb6de..d76de4b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
@@ -1,29 +1,29 @@
 //! Utilities for computing drop info about types.
 
-use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
+use hir_def::{AdtId, signatures::StructFlags};
 use rustc_hash::FxHashSet;
 use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
 use stdx::never;
 use triomphe::Arc;
 
 use crate::{
-    TraitEnvironment, consteval,
-    db::HirDatabase,
+    InferenceResult, TraitEnvironment, consteval,
     method_resolution::TraitImpls,
     next_solver::{
-        SimplifiedType, Ty, TyKind,
+        DbInterner, SimplifiedType, Ty, TyKind,
         infer::{InferCtxt, traits::ObligationCause},
         obligation_ctxt::ObligationCtxt,
     },
 };
 
-fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
+fn has_destructor(interner: DbInterner<'_>, adt: AdtId) -> bool {
+    let db = interner.db;
     let module = match adt {
         AdtId::EnumId(id) => db.lookup_intern_enum(id).container,
         AdtId::StructId(id) => db.lookup_intern_struct(id).container,
         AdtId::UnionId(id) => db.lookup_intern_union(id).container,
     };
-    let Some(drop_trait) = LangItem::Drop.resolve_trait(db, module.krate()) else {
+    let Some(drop_trait) = interner.lang_items().Drop else {
         return false;
     };
     let impls = match module.containing_block() {
@@ -73,7 +73,7 @@ fn has_drop_glue_impl<'db>(
     match ty.kind() {
         TyKind::Adt(adt_def, subst) => {
             let adt_id = adt_def.def_id().0;
-            if has_destructor(db, adt_id) {
+            if has_destructor(infcx.interner, adt_id) {
                 return DropGlue::HasDropGlue;
             }
             match adt_id {
@@ -137,7 +137,7 @@ fn has_drop_glue_impl<'db>(
         TyKind::Slice(ty) => has_drop_glue_impl(infcx, ty, env, visited),
         TyKind::Closure(closure_id, subst) => {
             let owner = db.lookup_intern_closure(closure_id.0).0;
-            let infer = db.infer(owner);
+            let infer = InferenceResult::for_body(db, owner);
             let (captures, _) = infer.closure_info(closure_id.0);
             let env = db.trait_environment_for_body(owner);
             captures
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
index 1bd5981..4c1590a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
@@ -5,7 +5,7 @@
 use hir_def::{
     AssocItemId, ConstId, CrateRootModuleId, FunctionId, GenericDefId, HasModule, TraitId,
     TypeAliasId, TypeOrConstParamId, TypeParamId, hir::generics::LocalTypeOrConstParamId,
-    lang_item::LangItem, signatures::TraitFlags,
+    signatures::TraitFlags,
 };
 use rustc_hash::FxHashSet;
 use rustc_type_ir::{
@@ -53,7 +53,7 @@ pub fn dyn_compatibility(
     db: &dyn HirDatabase,
     trait_: TraitId,
 ) -> Option<DynCompatibilityViolation> {
-    let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
+    let interner = DbInterner::new_no_crate(db);
     for super_trait in elaborate::supertrait_def_ids(interner, trait_.into()) {
         if let Some(v) = db.dyn_compatibility_of_trait(super_trait.0) {
             return if super_trait.0 == trait_ {
@@ -75,7 +75,7 @@ pub fn dyn_compatibility_with_callback<F>(
 where
     F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
-    let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None);
+    let interner = DbInterner::new_no_crate(db);
     for super_trait in elaborate::supertrait_def_ids(interner, trait_.into()).skip(1) {
         if db.dyn_compatibility_of_trait(super_trait.0).is_some() {
             cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
@@ -131,11 +131,11 @@ pub fn dyn_compatibility_of_trait_query(
 
 pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
     let krate = def.module(db).krate();
-    let Some(sized) = LangItem::Sized.resolve_trait(db, krate) else {
+    let interner = DbInterner::new_with(db, krate);
+    let Some(sized) = interner.lang_items().Sized else {
         return false;
     };
 
-    let interner = DbInterner::new_with(db, Some(krate), None);
     let predicates = GenericPredicates::query_explicit(db, def);
     // FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented in
     // rust-analyzer yet
@@ -234,34 +234,34 @@ fn visit_ty(
             &mut self,
             ty: <DbInterner<'db> as rustc_type_ir::Interner>::Ty,
         ) -> Self::Result {
-            let interner = DbInterner::new_with(self.db, None, None);
+            let interner = DbInterner::new_no_crate(self.db);
             match ty.kind() {
                 rustc_type_ir::TyKind::Param(param) if param.index == 0 => ControlFlow::Break(()),
                 rustc_type_ir::TyKind::Param(_) => ControlFlow::Continue(()),
-                rustc_type_ir::TyKind::Alias(AliasTyKind::Projection, proj) => match self
-                    .allow_self_projection
-                {
-                    AllowSelfProjection::Yes => {
-                        let trait_ = proj.trait_def_id(DbInterner::new_with(self.db, None, None));
-                        let trait_ = match trait_ {
-                            SolverDefId::TraitId(id) => id,
-                            _ => unreachable!(),
-                        };
-                        if self.super_traits.is_none() {
-                            self.super_traits = Some(
-                                elaborate::supertrait_def_ids(interner, self.trait_.into())
-                                    .map(|super_trait| super_trait.0)
-                                    .collect(),
-                            )
+                rustc_type_ir::TyKind::Alias(AliasTyKind::Projection, proj) => {
+                    match self.allow_self_projection {
+                        AllowSelfProjection::Yes => {
+                            let trait_ = proj.trait_def_id(interner);
+                            let trait_ = match trait_ {
+                                SolverDefId::TraitId(id) => id,
+                                _ => unreachable!(),
+                            };
+                            if self.super_traits.is_none() {
+                                self.super_traits = Some(
+                                    elaborate::supertrait_def_ids(interner, self.trait_.into())
+                                        .map(|super_trait| super_trait.0)
+                                        .collect(),
+                                )
+                            }
+                            if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
+                                ControlFlow::Continue(())
+                            } else {
+                                ty.super_visit_with(self)
+                            }
                         }
-                        if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
-                            ControlFlow::Continue(())
-                        } else {
-                            ty.super_visit_with(self)
-                        }
+                        AllowSelfProjection::No => ty.super_visit_with(self),
                     }
-                    AllowSelfProjection::No => ty.super_visit_with(self),
-                },
+                }
                 _ => ty.super_visit_with(self),
             }
         }
@@ -401,7 +401,8 @@ fn receiver_is_dispatchable<'db>(
 ) -> bool {
     let sig = sig.instantiate_identity();
 
-    let interner: DbInterner<'_> = DbInterner::new_with(db, Some(trait_.krate(db)), None);
+    let module = trait_.module(db);
+    let interner = DbInterner::new_with(db, module.krate());
     let self_param_id = TypeParamId::from_unchecked(TypeOrConstParamId {
         parent: trait_.into(),
         local_id: LocalTypeOrConstParamId::from_raw(la_arena::RawIdx::from_u32(0)),
@@ -419,16 +420,13 @@ fn receiver_is_dispatchable<'db>(
         return false;
     };
 
-    let krate = func.module(db).krate();
-    let traits = (
-        LangItem::Unsize.resolve_trait(db, krate),
-        LangItem::DispatchFromDyn.resolve_trait(db, krate),
-    );
+    let lang_items = interner.lang_items();
+    let traits = (lang_items.Unsize, lang_items.DispatchFromDyn);
     let (Some(unsize_did), Some(dispatch_from_dyn_did)) = traits else {
         return false;
     };
 
-    let meta_sized_did = LangItem::MetaSized.resolve_trait(db, krate);
+    let meta_sized_did = lang_items.MetaSized;
     let Some(meta_sized_did) = meta_sized_did else {
         return false;
     };
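For readers skimming the refactor, a minimal sketch (not part of the patch) of the lookup shape these hunks converge on: the interner is built from a crate, and lang items are read as pre-resolved `Option` fields off `interner.lang_items()` instead of going through `LangItem::*.resolve_trait(db, krate)` each time. The helper name below is illustrative; the types (`HirDatabase`, `DbInterner`, `TraitId`, `base_db::Crate`, the `LangItems` table) are the ones visible in the hunks above.

```rust
// Sketch only; assumes the rust-analyzer items used in the hunks above.
fn sized_and_unsize_traits(
    db: &dyn HirDatabase,
    krate: base_db::Crate,
) -> (Option<TraitId>, Option<TraitId>) {
    // One interner per crate; the pre-resolved lang-item table hangs off it.
    let interner = DbInterner::new_with(db, krate);
    let lang_items = interner.lang_items();
    // Missing lang items are simply `None`, so callers can `let`-else bail out.
    (lang_items.Sized, lang_items.Unsize)
}
```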
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 15eb355..ab17379 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -37,10 +37,10 @@
     ItemContainerId, LocalFieldId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
     expr_store::{Body, ExpressionStore, HygieneId, path::Path},
     hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
-    lang_item::{LangItem, LangItemTarget, lang_item},
+    lang_item::LangItems,
     layout::Integer,
     resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
-    signatures::{ConstSignature, StaticSignature},
+    signatures::{ConstSignature, EnumSignature, StaticSignature},
     type_ref::{ConstRef, LifetimeRefId, TypeRef, TypeRefId},
 };
 use hir_expand::{mod_path::ModPath, name::Name};
@@ -54,9 +54,10 @@
     AliasTyKind, TypeFoldable,
     inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
 };
+use salsa::Update;
 use span::Edition;
 use stdx::never;
-use triomphe::Arc;
+use thin_vec::ThinVec;
 
 use crate::{
     ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures,
@@ -94,7 +95,7 @@
 pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
 
 /// The entry point of type inference.
-pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult<'_>> {
+fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> {
     let _p = tracing::info_span!("infer_query").entered();
     let resolver = def.resolver(db);
     let body = db.body(def);
@@ -107,7 +108,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
         DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)),
         DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
         DefWithBodyId::VariantId(v) => {
-            ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() {
+            ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) {
                 hir_def::layout::IntegerType::Pointer(signed) => match signed {
                     true => ctx.types.isize,
                     false => ctx.types.usize,
@@ -158,17 +159,14 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
 
     ctx.handle_opaque_type_uses();
 
-    Arc::new(ctx.resolve_all())
+    ctx.resolve_all()
 }
 
-pub(crate) fn infer_cycle_result(
-    db: &dyn HirDatabase,
-    _: DefWithBodyId,
-) -> Arc<InferenceResult<'_>> {
-    Arc::new(InferenceResult {
+fn infer_cycle_result(db: &dyn HirDatabase, _: DefWithBodyId) -> InferenceResult<'_> {
+    InferenceResult {
         has_errors: true,
-        ..InferenceResult::new(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed))
-    })
+        ..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed))
+    }
 }
 
 /// Binding modes inferred for patterns.
@@ -198,7 +196,7 @@ pub enum InferenceTyDiagnosticSource {
     Signature,
 }
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Update)]
 pub enum InferenceDiagnostic<'db> {
     NoSuchField {
         field: ExprOrPatId,
@@ -292,7 +290,7 @@ pub enum InferenceDiagnostic<'db> {
 }
 
 /// A mismatch between an expected and an inferred type.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+#[derive(Clone, PartialEq, Eq, Debug, Hash, Update)]
 pub struct TypeMismatch<'db> {
     pub expected: Ty<'db>,
     pub actual: Ty<'db>,
@@ -338,7 +336,7 @@ pub struct TypeMismatch<'db> {
 ///    At some point, of course, `Box` should move out of the compiler, in which
 ///    case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
 ///    Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, Update)]
 pub struct Adjustment<'db> {
     #[type_visitable(ignore)]
     #[type_foldable(identity)]
@@ -475,9 +473,10 @@ pub enum PointerCast {
 /// When you add a field that stores types (including `Substitution` and the like), don't forget
 /// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must
 /// not appear in the final inference result.
-#[derive(Clone, PartialEq, Eq, Debug)]
+#[derive(Clone, PartialEq, Eq, Debug, Update)]
 pub struct InferenceResult<'db> {
     /// For each method call expr, records the function it resolves to.
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
     method_resolutions: FxHashMap<ExprId, (FunctionId, GenericArgs<'db>)>,
     /// For each field access expr, records the field it resolves to.
     field_resolutions: FxHashMap<ExprId, Either<FieldId, TupleFieldId>>,
@@ -488,28 +487,41 @@ pub struct InferenceResult<'db> {
     /// Whenever a tuple field expression accesses a tuple field, we allocate a tuple id in
     /// [`InferenceContext`] and store the tuple's substitution there. This map is the reverse of
     /// that, which allows us to resolve a [`TupleFieldId`]'s type.
-    tuple_field_access_types: FxHashMap<TupleId, Tys<'db>>,
-    /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
-    diagnostics: Vec<InferenceDiagnostic<'db>>,
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
+    tuple_field_access_types: ThinVec<Tys<'db>>,
+
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
     pub(crate) type_of_expr: ArenaMap<ExprId, Ty<'db>>,
     /// For each pattern, records the type it resolves to.
     ///
     /// **Note**: When a pattern type is resolved it may still contain
     /// unresolved or missing subpatterns or subpatterns of mismatched types.
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
     pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* binding id is technically update */)))]
     pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
-    pub(crate) type_of_type_placeholder: ArenaMap<TypeRefId, Ty<'db>>,
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* type ref id is technically update */)))]
+    pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, Ty<'db>>,
     pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
-    pub(crate) type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
+
+    pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch<'db>>>>,
     /// Whether there are any type-mismatching errors in the result.
     // FIXME: This isn't as useful as initially thought, because we fall placeholders
     // back to `TyKind::Error`,
     // which will then mark this field.
     pub(crate) has_errors: bool,
+    /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
+    diagnostics: ThinVec<InferenceDiagnostic<'db>>,
+
     /// Interned `Error` type to return references to.
     // FIXME: Remove this.
     error_ty: Ty<'db>,
+
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
+    pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>,
     /// Stores the types which were implicitly dereferenced in pattern binding modes.
+    #[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
     pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty<'db>>>,
     /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
     ///
@@ -525,13 +537,22 @@ pub struct InferenceResult<'db> {
     /// ```
     /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
     pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
-    pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>,
+
     pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem<'db>>, FnTrait)>,
     // FIXME: remove this field
     pub mutated_bindings_in_closure: FxHashSet<BindingId>,
+
     pub(crate) coercion_casts: FxHashSet<ExprId>,
 }
 
+#[salsa::tracked]
+impl<'db> InferenceResult<'db> {
+    #[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)]
+    pub fn for_body(db: &'db dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'db> {
+        infer_query(db, def)
+    }
+}
+
 impl<'db> InferenceResult<'db> {
     fn new(error_ty: Ty<'db>) -> Self {
         Self {
@@ -595,25 +616,31 @@ pub fn assoc_resolutions_for_expr_or_pat(
         }
     }
     pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> {
-        self.type_mismatches.get(&expr.into())
+        self.type_mismatches.as_deref()?.get(&expr.into())
     }
     pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> {
-        self.type_mismatches.get(&pat.into())
+        self.type_mismatches.as_deref()?.get(&pat.into())
     }
     pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch<'db>)> {
-        self.type_mismatches.iter().map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
+        self.type_mismatches
+            .as_deref()
+            .into_iter()
+            .flatten()
+            .map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
     }
     pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch<'db>)> {
-        self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
-            ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
-            _ => None,
-        })
+        self.type_mismatches.as_deref().into_iter().flatten().filter_map(
+            |(expr_or_pat, mismatch)| match *expr_or_pat {
+                ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
+                _ => None,
+            },
+        )
     }
     pub fn placeholder_types(&self) -> impl Iterator<Item = (TypeRefId, &Ty<'db>)> {
-        self.type_of_type_placeholder.iter()
+        self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty))
     }
     pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
-        self.type_of_type_placeholder.get(type_ref).copied()
+        self.type_of_type_placeholder.get(&type_ref).copied()
     }
     pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem<'db>>, FnTrait) {
         self.closure_info.get(&closure).unwrap()
@@ -659,7 +686,7 @@ pub fn diagnostics(&self) -> &[InferenceDiagnostic<'db>] {
     }
 
     pub fn tuple_field_access_type(&self, id: TupleId) -> Tys<'db> {
-        self.tuple_field_access_types[&id]
+        self.tuple_field_access_types[id.0 as usize]
     }
 
     pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> {
@@ -816,11 +843,12 @@ pub(crate) struct InferenceContext<'body, 'db> {
     /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
     /// and resolve the path via its methods. This will ensure proper error reporting.
     pub(crate) resolver: Resolver<'db>,
-    target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
+    target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>,
     pub(crate) unstable_features: MethodResolutionUnstableFeatures,
     pub(crate) edition: Edition,
     pub(crate) generic_def: GenericDefId,
     pub(crate) table: unify::InferenceTable<'db>,
+    pub(crate) lang_items: &'db LangItems,
     /// The traits in scope, disregarding block modules. This is used for caching purposes.
     traits_in_scope: FxHashSet<TraitId>,
     pub(crate) result: InferenceResult<'db>,
@@ -926,6 +954,7 @@ fn new(
             unstable_features: MethodResolutionUnstableFeatures::from_def_map(
                 resolver.top_level_def_map(),
             ),
+            lang_items: table.interner().lang_items(),
             edition: resolver.krate().data(db).edition,
             table,
             tuple_field_accesses_rev: Default::default(),
@@ -960,12 +989,10 @@ fn krate(&self) -> Crate {
         self.resolver.krate()
     }
 
-    fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+    fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
         let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| {
             let target_features = match self.owner {
-                DefWithBodyId::FunctionId(id) => {
-                    TargetFeatures::from_attrs(&self.db.attrs(id.into()))
-                }
+                DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(self.db, id),
                 _ => TargetFeatures::default(),
             };
             let target_feature_is_safe = match &self.krate().workspace_data(self.db).target {
@@ -1063,13 +1090,14 @@ fn resolve_all(self) -> InferenceResult<'db> {
         type_of_type_placeholder.shrink_to_fit();
         type_of_opaque.shrink_to_fit();
 
-        *has_errors |= !type_mismatches.is_empty();
-
-        for mismatch in (*type_mismatches).values_mut() {
-            mismatch.expected = table.resolve_completely(mismatch.expected);
-            mismatch.actual = table.resolve_completely(mismatch.actual);
+        if let Some(type_mismatches) = type_mismatches {
+            *has_errors = true;
+            for mismatch in type_mismatches.values_mut() {
+                mismatch.expected = table.resolve_completely(mismatch.expected);
+                mismatch.actual = table.resolve_completely(mismatch.actual);
+            }
+            type_mismatches.shrink_to_fit();
         }
-        type_mismatches.shrink_to_fit();
         diagnostics.retain_mut(|diagnostic| {
             use InferenceDiagnostic::*;
             match diagnostic {
@@ -1121,9 +1149,8 @@ fn resolve_all(self) -> InferenceResult<'db> {
         pat_adjustments.shrink_to_fit();
         result.tuple_field_access_types = tuple_field_accesses_rev
             .into_iter()
-            .enumerate()
-            .map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst)))
-            .inspect(|(_, subst)| {
+            .map(|subst| table.resolve_completely(subst))
+            .inspect(|subst| {
                 *has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error());
             })
             .collect();
@@ -1520,7 +1547,10 @@ fn demand_eqtype(
     ) -> Result<(), ()> {
         let result = self.demand_eqtype_fixme_no_diag(expected, actual);
         if result.is_err() {
-            self.result.type_mismatches.insert(id, TypeMismatch { expected, actual });
+            self.result
+                .type_mismatches
+                .get_or_insert_default()
+                .insert(id, TypeMismatch { expected, actual });
         }
         result
     }
@@ -1837,21 +1867,13 @@ fn resolve_variant_on_alias(
         }
     }
 
-    fn resolve_lang_item(&self, item: LangItem) -> Option<LangItemTarget> {
-        let krate = self.resolver.krate();
-        lang_item(self.db, krate, item)
-    }
-
     fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
         trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output))
     }
 
     fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
-        let ItemContainerId::TraitId(trait_) = self
-            .resolve_lang_item(LangItem::IntoFutureIntoFuture)?
-            .as_function()?
-            .lookup(self.db)
-            .container
+        let ItemContainerId::TraitId(trait_) =
+            self.lang_items.IntoFutureIntoFuture?.lookup(self.db).container
         else {
             return None;
         };
@@ -1859,42 +1881,42 @@ fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
     }
 
     fn resolve_boxed_box(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::OwnedBox)?.as_struct()?;
+        let struct_ = self.lang_items.OwnedBox?;
         Some(struct_.into())
     }
 
     fn resolve_range_full(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::RangeFull)?.as_struct()?;
+        let struct_ = self.lang_items.RangeFull?;
         Some(struct_.into())
     }
 
     fn resolve_range(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::Range)?.as_struct()?;
+        let struct_ = self.lang_items.Range?;
         Some(struct_.into())
     }
 
     fn resolve_range_inclusive(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::RangeInclusiveStruct)?.as_struct()?;
+        let struct_ = self.lang_items.RangeInclusiveStruct?;
         Some(struct_.into())
     }
 
     fn resolve_range_from(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::RangeFrom)?.as_struct()?;
+        let struct_ = self.lang_items.RangeFrom?;
         Some(struct_.into())
     }
 
     fn resolve_range_to(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::RangeTo)?.as_struct()?;
+        let struct_ = self.lang_items.RangeTo?;
         Some(struct_.into())
     }
 
     fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::RangeToInclusive)?.as_struct()?;
+        let struct_ = self.lang_items.RangeToInclusive?;
         Some(struct_.into())
     }
 
     fn resolve_va_list(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
+        let struct_ = self.lang_items.VaList?;
         Some(struct_.into())
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index 54a06eb..89ebd2b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -7,7 +7,6 @@
 use hir_def::{
     TraitId,
     hir::{ClosureKind, ExprId, PatId},
-    lang_item::LangItem,
     type_ref::TypeRefId,
 };
 use rustc_type_ir::{
@@ -220,11 +219,12 @@ pub(super) fn infer_closure(
     }
 
     fn fn_trait_kind_from_def_id(&self, trait_id: TraitId) -> Option<rustc_type_ir::ClosureKind> {
-        let lang_item = self.db.lang_attr(trait_id.into())?;
-        match lang_item {
-            LangItem::Fn => Some(rustc_type_ir::ClosureKind::Fn),
-            LangItem::FnMut => Some(rustc_type_ir::ClosureKind::FnMut),
-            LangItem::FnOnce => Some(rustc_type_ir::ClosureKind::FnOnce),
+        match trait_id {
+            _ if self.lang_items.Fn == Some(trait_id) => Some(rustc_type_ir::ClosureKind::Fn),
+            _ if self.lang_items.FnMut == Some(trait_id) => Some(rustc_type_ir::ClosureKind::FnMut),
+            _ if self.lang_items.FnOnce == Some(trait_id) => {
+                Some(rustc_type_ir::ClosureKind::FnOnce)
+            }
             _ => None,
         }
     }
@@ -233,11 +233,14 @@ fn async_fn_trait_kind_from_def_id(
         &self,
         trait_id: TraitId,
     ) -> Option<rustc_type_ir::ClosureKind> {
-        let lang_item = self.db.lang_attr(trait_id.into())?;
-        match lang_item {
-            LangItem::AsyncFn => Some(rustc_type_ir::ClosureKind::Fn),
-            LangItem::AsyncFnMut => Some(rustc_type_ir::ClosureKind::FnMut),
-            LangItem::AsyncFnOnce => Some(rustc_type_ir::ClosureKind::FnOnce),
+        match trait_id {
+            _ if self.lang_items.AsyncFn == Some(trait_id) => Some(rustc_type_ir::ClosureKind::Fn),
+            _ if self.lang_items.AsyncFnMut == Some(trait_id) => {
+                Some(rustc_type_ir::ClosureKind::FnMut)
+            }
+            _ if self.lang_items.AsyncFnOnce == Some(trait_id) => {
+                Some(rustc_type_ir::ClosureKind::FnOnce)
+            }
             _ => None,
         }
     }
@@ -433,21 +436,20 @@ fn deduce_sig_from_projection(
         projection: PolyProjectionPredicate<'db>,
     ) -> Option<PolyFnSig<'db>> {
         let SolverDefId::TypeAliasId(def_id) = projection.item_def_id() else { unreachable!() };
-        let lang_item = self.db.lang_attr(def_id.into());
 
         // For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits,
         // for closures and async closures, respectively.
         match closure_kind {
-            ClosureKind::Closure if lang_item == Some(LangItem::FnOnceOutput) => {
+            ClosureKind::Closure if Some(def_id) == self.lang_items.FnOnceOutput => {
                 self.extract_sig_from_projection(projection)
             }
-            ClosureKind::Async if lang_item == Some(LangItem::AsyncFnOnceOutput) => {
+            ClosureKind::Async if Some(def_id) == self.lang_items.AsyncFnOnceOutput => {
                 self.extract_sig_from_projection(projection)
             }
             // It's possible we've passed the closure to a (somewhat out-of-fashion)
             // `F: FnOnce() -> Fut, Fut: Future<Output = T>` style bound. Let's still
             // guide inference here, since it's beneficial for the user.
-            ClosureKind::Async if lang_item == Some(LangItem::FnOnceOutput) => {
+            ClosureKind::Async if Some(def_id) == self.lang_items.FnOnceOutput => {
                 self.extract_sig_from_projection_and_future_bound(projection)
             }
             _ => None,
@@ -538,7 +540,7 @@ fn extract_sig_from_projection_and_future_bound(
                 && let ret_projection = bound.predicate.kind().rebind(ret_projection)
                 && let Some(ret_projection) = ret_projection.no_bound_vars()
                 && let SolverDefId::TypeAliasId(assoc_type) = ret_projection.def_id()
-                && self.db.lang_attr(assoc_type.into()) == Some(LangItem::FutureOutput)
+                && Some(assoc_type) == self.lang_items.FutureOutput
             {
                 return_ty = Some(ret_projection.term.expect_type());
                 break;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs
index 944b359..251e7f7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs
@@ -31,10 +31,10 @@
 
 // The below functions handle capture and closure kind (Fn, FnMut, ..)
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
 pub(crate) struct HirPlace<'db> {
     pub(crate) local: BindingId,
-    pub(crate) projections: Vec<ProjectionElem<Infallible, Ty<'db>>>,
+    pub(crate) projections: Vec<ProjectionElem<'db, Infallible>>,
 }
 
 impl<'db> HirPlace<'db> {
@@ -76,7 +76,7 @@ pub enum CaptureKind {
     ByValue,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
 pub struct CapturedItem<'db> {
     pub(crate) place: HirPlace<'db>,
     pub(crate) kind: CaptureKind,
@@ -87,6 +87,7 @@ pub struct CapturedItem<'db> {
     /// copy all captures of the inner closure to the outer closure, and then we may
     /// truncate them, and we want the correct span to be reported.
     span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
+    #[update(unsafe(with(crate::utils::unsafe_update_eq)))]
     pub(crate) ty: EarlyBinder<'db, Ty<'db>>,
 }
 
@@ -101,7 +102,7 @@ pub fn has_field_projections(&self) -> bool {
     }
 
     pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args)
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 4acf964..df24148 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -37,11 +37,10 @@
 
 use hir_def::{
     CallableDefId,
+    attrs::AttrFlags,
     hir::{ExprId, ExprOrPatId},
-    lang_item::LangItem,
     signatures::FunctionSignature,
 };
-use intern::sym;
 use rustc_ast_ir::Mutability;
 use rustc_type_ir::{
     BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
@@ -79,7 +78,7 @@
 trait CoerceDelegate<'db> {
     fn infcx(&self) -> &InferCtxt<'db>;
     fn env(&self) -> &TraitEnvironment<'db>;
-    fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget);
+    fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget);
 
     fn set_diverging(&mut self, diverging_ty: Ty<'db>);
 
@@ -612,10 +611,8 @@ fn coerce_unsized(&mut self, source: Ty<'db>, target: Ty<'db>) -> CoerceResult<'
             return Err(TypeError::Mismatch);
         }
 
-        let traits = (
-            LangItem::Unsize.resolve_trait(self.db(), self.env().krate),
-            LangItem::CoerceUnsized.resolve_trait(self.db(), self.env().krate),
-        );
+        let lang_items = self.interner().lang_items();
+        let traits = (lang_items.Unsize, lang_items.CoerceUnsized);
         let (Some(unsize_did), Some(coerce_unsized_did)) = traits else {
             debug!("missing Unsize or CoerceUnsized traits");
             return Err(TypeError::Mismatch);
@@ -855,14 +852,14 @@ fn coerce_from_fn_item(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
                             return Err(TypeError::IntrinsicCast);
                         }
 
-                        let attrs = self.db().attrs(def_id.into());
-                        if attrs.by_key(sym::rustc_force_inline).exists() {
+                        let attrs = AttrFlags::query(self.db(), def_id.into());
+                        if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) {
                             return Err(TypeError::ForceInlineCast);
                         }
 
-                        if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() {
+                        if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
                             let fn_target_features =
-                                TargetFeatures::from_attrs_no_implications(&attrs);
+                                TargetFeatures::from_fn_no_implications(self.db(), def_id);
                             // Allow the coercion if the current function has all the features that would be
                             // needed to call the coercee safely.
                             let (target_features, target_feature_is_safe) =
@@ -981,8 +978,9 @@ fn infcx(&self) -> &InferCtxt<'db> {
     fn env(&self) -> &TraitEnvironment<'db> {
         &self.0.table.trait_env
     }
+
     #[inline]
-    fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+    fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
         self.0.target_features()
     }
 
@@ -1075,7 +1073,7 @@ fn try_find_coercion_lub(
 
         let is_force_inline = |ty: Ty<'db>| {
             if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() {
-                self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists()
+                AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE)
             } else {
                 false
             }
@@ -1514,7 +1512,7 @@ pub(crate) fn coerce_inner(
 
                 self.final_ty = Some(icx.types.error);
 
-                icx.result.type_mismatches.insert(
+                icx.result.type_mismatches.get_or_insert_default().insert(
                     expression.into(),
                     if label_expression_as_expected {
                         TypeMismatch { expected: found, actual: expected }
@@ -1551,7 +1549,7 @@ pub fn could_coerce<'db>(
 struct HirCoercionDelegate<'a, 'db> {
     infcx: &'a InferCtxt<'db>,
     env: &'a TraitEnvironment<'db>,
-    target_features: &'a TargetFeatures,
+    target_features: &'a TargetFeatures<'db>,
 }
 
 impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
@@ -1563,7 +1561,7 @@ fn infcx(&self) -> &InferCtxt<'db> {
     fn env(&self) -> &TraitEnvironment<'db> {
         self.env
     }
-    fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+    fn target_features(&self) -> (&TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
         (self.target_features, TargetFeatureIsSafeInTarget::No)
     }
     fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {}
@@ -1578,7 +1576,7 @@ fn coerce<'db>(
     env: Arc<TraitEnvironment<'db>>,
     tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
 ) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
-    let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+    let interner = DbInterner::new_with(db, env.krate);
     let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
     let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys);
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs
index 844eb02..0eb7a2f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs
@@ -11,6 +11,7 @@
 use hir_def::expr_store::path::Path;
 use hir_def::{hir::ExprOrPatId, resolver::Resolver};
 use la_arena::{Idx, RawIdx};
+use thin_vec::ThinVec;
 
 use crate::{
     InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringDiagnostic,
@@ -24,7 +25,7 @@
 // to our resolver and so we cannot have a mutable reference, but we really want to have
 // the ability to dispatch diagnostics during this work; otherwise the code becomes a complete mess.
 #[derive(Debug, Default, Clone)]
-pub(super) struct Diagnostics<'db>(RefCell<Vec<InferenceDiagnostic<'db>>>);
+pub(super) struct Diagnostics<'db>(RefCell<ThinVec<InferenceDiagnostic<'db>>>);
 
 impl<'db> Diagnostics<'db> {
     pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) {
@@ -41,7 +42,7 @@ fn push_ty_diagnostics(
         );
     }
 
-    pub(super) fn finish(self) -> Vec<InferenceDiagnostic<'db>> {
+    pub(super) fn finish(self) -> ThinVec<InferenceDiagnostic<'db>> {
         self.0.into_inner()
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 7487660..01508b0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -10,7 +10,6 @@
         Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
         LabelId, Literal, Pat, PatId, Statement, UnaryOp,
     },
-    lang_item::{LangItem, LangItemTarget},
     resolver::ValueNs,
 };
 use hir_def::{FunctionId, hir::ClosureKind};
@@ -71,6 +70,7 @@ pub(crate) fn infer_expr(
             if !could_unify {
                 self.result
                     .type_mismatches
+                    .get_or_insert_default()
                     .insert(tgt_expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
             }
         }
@@ -100,6 +100,7 @@ pub(super) fn infer_expr_coerce(
                 Err(_) => {
                     self.result
                         .type_mismatches
+                        .get_or_insert_default()
                         .insert(expr.into(), TypeMismatch { expected: target, actual: ty });
                     target
                 }
@@ -293,6 +294,7 @@ fn infer_expr_coerce_never(
                 if !could_unify {
                     self.result
                         .type_mismatches
+                        .get_or_insert_default()
                         .insert(expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
                 }
             }
@@ -874,14 +876,10 @@ fn infer_expr_inner(
                 Literal::CString(..) => Ty::new_ref(
                     self.interner(),
                     self.types.re_static,
-                    self.resolve_lang_item(LangItem::CStr)
-                        .and_then(LangItemTarget::as_struct)
-                        .map_or_else(
-                            || self.err_ty(),
-                            |strukt| {
-                                Ty::new_adt(self.interner(), strukt.into(), self.types.empty_args)
-                            },
-                        ),
+                    self.lang_items.CStr.map_or_else(
+                        || self.err_ty(),
+                        |strukt| Ty::new_adt(self.interner(), strukt.into(), self.types.empty_args),
+                    ),
                     Mutability::Not,
                 ),
                 Literal::Char(..) => self.types.char,
@@ -1188,6 +1186,7 @@ fn infer_async_block(
                     Err(_) => {
                         this.result
                             .type_mismatches
+                            .get_or_insert_default()
                             .insert(tgt_expr.into(), TypeMismatch { expected: target, actual: ty });
                         target
                     }
@@ -1279,7 +1278,7 @@ pub(crate) fn write_fn_trait_method_resolution(
                 }
             }
         }
-        let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else {
+        let Some(trait_) = fn_x.get_id(self.lang_items) else {
             return;
         };
         let trait_data = trait_.trait_items(self.db);
@@ -1456,11 +1455,10 @@ fn infer_block(
     ) -> Ty<'db> {
         let coerce_ty = expected.coercion_target_type(&mut self.table);
         let g = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
-        let prev_state = block_id.map(|block_id| {
+        let prev_env = block_id.map(|block_id| {
             let prev_env = self.table.trait_env.clone();
             TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
-            let prev_block = self.table.infer_ctxt.interner.block.replace(block_id);
-            (prev_env, prev_block)
+            prev_env
         });
 
         let (break_ty, ty) =
@@ -1556,7 +1554,7 @@ fn infer_block(
                             )
                             .is_err()
                         {
-                            this.result.type_mismatches.insert(
+                            this.result.type_mismatches.get_or_insert_default().insert(
                                 expr.into(),
                                 TypeMismatch { expected: t, actual: this.types.unit },
                             );
@@ -1568,9 +1566,8 @@ fn infer_block(
                 }
             });
         self.resolver.reset_to_guard(g);
-        if let Some((prev_env, prev_block)) = prev_state {
+        if let Some(prev_env) = prev_env {
             self.table.trait_env = prev_env;
-            self.table.infer_ctxt.interner.block = prev_block;
         }
 
         break_ty.unwrap_or(ty)
@@ -2130,6 +2127,7 @@ pub(in super::super) fn check_call_arguments(
                     // Don't report type mismatches if there is a mismatch in args count.
                     self.result
                         .type_mismatches
+                        .get_or_insert_default()
                         .insert((*arg).into(), TypeMismatch { expected, actual: found });
                 }
             }
@@ -2188,9 +2186,11 @@ fn check_legacy_const_generics(&mut self, callee: Ty<'db>, args: &[ExprId]) -> B
         };
 
         let data = self.db.function_signature(func);
-        let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else {
+        let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
+        else {
             return Default::default();
         };
+        let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);
 
         // only use legacy const generics if the param count matches with them
         if data.params.len() + legacy_const_generics_indices.len() != args.len() {
@@ -2199,9 +2199,8 @@ fn check_legacy_const_generics(&mut self, callee: Ty<'db>, args: &[ExprId]) -> B
             } else {
                 // there are more parameters than there should be without legacy
                 // const params; use them
-                let mut indices = legacy_const_generics_indices.as_ref().clone();
-                indices.sort();
-                return indices;
+                legacy_const_generics_indices.sort_unstable();
+                return legacy_const_generics_indices;
             }
         }
 
@@ -2214,9 +2213,8 @@ fn check_legacy_const_generics(&mut self, callee: Ty<'db>, args: &[ExprId]) -> B
             self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
             // FIXME: evaluate and unify with the const
         }
-        let mut indices = legacy_const_generics_indices.as_ref().clone();
-        indices.sort();
-        indices
+        legacy_const_generics_indices.sort_unstable();
+        legacy_const_generics_indices
     }
 
     pub(super) fn with_breakable_ctx<T>(
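The repeated `get_or_insert_default().insert(..)` calls above all follow the same lazy-allocation idiom: `type_mismatches` is now an `Option<Box<FxHashMap<..>>>`, so bodies that type-check cleanly never allocate the map. A self-contained illustration with plain std types (the field and key/value types here are toy stand-ins, not the rust-analyzer ones):

```rust
use std::collections::HashMap;

// Toy stand-ins for the real `ExprOrPatId`/`TypeMismatch` types; the point is
// the `Option<Box<map>>` layout, which keeps mismatch-free results at pointer
// size and only allocates on the first insertion.
#[derive(Default)]
struct SketchResult {
    type_mismatches: Option<Box<HashMap<u32, (&'static str, &'static str)>>>,
}

fn main() {
    let mut result = SketchResult::default();
    // Allocates the boxed map lazily, like the
    // `get_or_insert_default().insert(..)` calls in the hunks above.
    result
        .type_mismatches
        .get_or_insert_default()
        .insert(0, ("expected", "actual"));
    // Reads go through `as_deref()`, so a `None` table yields an empty iterator.
    let mismatches = result.type_mismatches.as_deref().into_iter().flatten().count();
    assert_eq!(mismatches, 1);
}
```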
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
index a257547..87dcaa8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
@@ -18,7 +18,6 @@ pub(crate) fn infer_mut_body(&mut self) {
     }
 
     fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
-        let krate = self.krate();
         if let Some(adjustments) = self.result.expr_adjustments.get_mut(&tgt_expr) {
             let mut adjustments = adjustments.iter_mut().rev().peekable();
             while let Some(adj) = adjustments.next() {
@@ -32,7 +31,6 @@ fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
                             };
                             if let Some(infer_ok) = Self::try_mutable_overloaded_place_op(
                                 &self.table,
-                                krate,
                                 source_ty,
                                 None,
                                 PlaceOp::Deref,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs
index 88319a8..6fbac8f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs
@@ -2,7 +2,7 @@
 
 use std::collections::hash_map;
 
-use hir_def::{GenericParamId, TraitId, hir::ExprId, lang_item::LangItem};
+use hir_def::{GenericParamId, TraitId, hir::ExprId};
 use intern::{Symbol, sym};
 use rustc_ast_ir::Mutability;
 use rustc_type_ir::inherent::{IntoKind, Ty as _};
@@ -355,17 +355,18 @@ fn lookup_op_method(
 
     fn lang_item_for_bin_op(&self, op: BinaryOp) -> (Symbol, Option<TraitId>) {
         let (method_name, trait_lang_item) =
-            crate::lang_items::lang_items_for_bin_op(op).expect("invalid operator provided");
-        (method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
+            crate::lang_items::lang_items_for_bin_op(self.lang_items, op)
+                .expect("invalid operator provided");
+        (method_name, trait_lang_item)
     }
 
     fn lang_item_for_unop(&self, op: UnaryOp) -> (Symbol, Option<TraitId>) {
         let (method_name, trait_lang_item) = match op {
-            UnaryOp::Not => (sym::not, LangItem::Not),
-            UnaryOp::Neg => (sym::neg, LangItem::Neg),
+            UnaryOp::Not => (sym::not, self.lang_items.Not),
+            UnaryOp::Neg => (sym::neg, self.lang_items.Neg),
             UnaryOp::Deref => panic!("Deref is not overloadable"),
         };
-        (method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
+        (method_name, trait_lang_item)
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index ece2bdc..a02e280 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -331,7 +331,7 @@ fn infer_pat(
                         return self.pat_ty_after_adjustment(pat);
                     }
                     Err(_) => {
-                        self.result.type_mismatches.insert(
+                        self.result.type_mismatches.get_or_insert_default().insert(
                             pat.into(),
                             TypeMismatch { expected, actual: ty_inserted_vars },
                         );
@@ -415,6 +415,7 @@ fn infer_pat(
                     Err(_) => {
                         self.result
                             .type_mismatches
+                            .get_or_insert_default()
                             .insert(pat.into(), TypeMismatch { expected, actual: lhs_ty });
                         // `rhs_ty` is returned so no further type mismatches are
                         // reported because of this mismatch.
@@ -431,7 +432,10 @@ fn infer_pat(
         let ty = self.insert_type_vars_shallow(ty);
         // FIXME: This never check is odd, but required with how we do inference right now
         if !expected.is_never() && !self.unify(ty, expected) {
-            self.result.type_mismatches.insert(pat.into(), TypeMismatch { expected, actual: ty });
+            self.result
+                .type_mismatches
+                .get_or_insert_default()
+                .insert(pat.into(), TypeMismatch { expected, actual: ty });
         }
         self.write_pat_ty(pat, ty);
         self.pat_ty_after_adjustment(pat)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs
index 50018bb..9544fb4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs
@@ -1,7 +1,6 @@
 //! Inference of *place operators*: deref and indexing (operators that create places, as opposed to values).
 
-use base_db::Crate;
-use hir_def::{hir::ExprId, lang_item::LangItem};
+use hir_def::hir::ExprId;
 use intern::sym;
 use rustc_ast_ir::Mutability;
 use rustc_type_ir::inherent::{IntoKind, Ty as _};
@@ -187,8 +186,8 @@ pub(super) fn try_overloaded_place_op(
         debug!("try_overloaded_place_op({:?},{:?})", base_ty, op);
 
         let (Some(imm_tr), imm_op) = (match op {
-            PlaceOp::Deref => (LangItem::Deref.resolve_trait(self.db, self.krate()), sym::deref),
-            PlaceOp::Index => (LangItem::Index.resolve_trait(self.db, self.krate()), sym::index),
+            PlaceOp::Deref => (self.lang_items.Deref, sym::deref),
+            PlaceOp::Index => (self.lang_items.Index, sym::index),
         }) else {
             // Bail if `Deref` or `Index` isn't defined.
             return None;
@@ -209,16 +208,16 @@ pub(super) fn try_overloaded_place_op(
 
     pub(super) fn try_mutable_overloaded_place_op(
         table: &InferenceTable<'db>,
-        krate: Crate,
         base_ty: Ty<'db>,
         opt_rhs_ty: Option<Ty<'db>>,
         op: PlaceOp,
     ) -> Option<InferOk<'db, MethodCallee<'db>>> {
         debug!("try_mutable_overloaded_place_op({:?},{:?})", base_ty, op);
 
+        let lang_items = table.interner().lang_items();
         let (Some(mut_tr), mut_op) = (match op {
-            PlaceOp::Deref => (LangItem::DerefMut.resolve_trait(table.db, krate), sym::deref_mut),
-            PlaceOp::Index => (LangItem::IndexMut.resolve_trait(table.db, krate), sym::index_mut),
+            PlaceOp::Deref => (lang_items.DerefMut, sym::deref_mut),
+            PlaceOp::Index => (lang_items.IndexMut, sym::index_mut),
         }) else {
             // Bail if `DerefMut` or `IndexMut` isn't defined.
             return None;
@@ -276,8 +275,7 @@ pub(super) fn convert_place_op_to_mutable(
                 ))
             }
         };
-        let method =
-            Self::try_mutable_overloaded_place_op(&self.table, self.krate(), base_ty, arg_ty, op);
+        let method = Self::try_mutable_overloaded_place_op(&self.table, base_ty, arg_ty, op);
         let method = match method {
             Some(ok) => self.table.register_infer_ok(ok),
             // Couldn't find the mutable variant of the place op, keep the
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index 0b56649..bc3c463 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -2,7 +2,7 @@
 
 use std::fmt;
 
-use hir_def::{AdtId, DefWithBodyId, GenericParamId, lang_item::LangItem};
+use hir_def::{AdtId, DefWithBodyId, GenericParamId};
 use hir_expand::name::Name;
 use intern::sym;
 use rustc_hash::FxHashSet;
@@ -113,7 +113,7 @@ fn could_unify_impl<'db>(
     tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
     select: for<'a> fn(&mut ObligationCtxt<'a, 'db>) -> Vec<NextSolverError<'db>>,
 ) -> bool {
-    let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+    let interner = DbInterner::new_with(db, env.krate);
     let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
     let cause = ObligationCause::dummy();
     let at = infcx.at(&cause, env.env);
@@ -148,7 +148,7 @@ pub(crate) fn new(
         trait_env: Arc<TraitEnvironment<'db>>,
         owner: Option<DefWithBodyId>,
     ) -> Self {
-        let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
+        let interner = DbInterner::new_with(db, trait_env.krate);
         let typing_mode = match owner {
             Some(owner) => TypingMode::typeck_for_body(interner, owner.into()),
             // IDE things wants to reveal opaque types.
@@ -174,7 +174,7 @@ pub(crate) fn type_is_copy_modulo_regions(&self, ty: Ty<'db>) -> bool {
     }
 
     pub(crate) fn type_var_is_sized(&self, self_ty: TyVid) -> bool {
-        let Some(sized_did) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
+        let Some(sized_did) = self.interner().lang_items().Sized else {
             return true;
         };
         self.obligations_for_self_ty(self_ty).into_iter().any(|obligation| {
@@ -520,13 +520,13 @@ fn callable_sig_from_fn_trait(
         ty: Ty<'db>,
         num_args: usize,
     ) -> Option<(FnTrait, Vec<Ty<'db>>, Ty<'db>)> {
+        let lang_items = self.interner().lang_items();
         for (fn_trait_name, output_assoc_name, subtraits) in [
             (FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]),
             (FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]),
             (FnTrait::AsyncFnOnce, sym::CallOnceFuture, &[]),
         ] {
-            let krate = self.trait_env.krate;
-            let fn_trait = fn_trait_name.get_id(self.db, krate)?;
+            let fn_trait = fn_trait_name.get_id(lang_items)?;
             let trait_data = fn_trait.trait_items(self.db);
             let output_assoc_type =
                 trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
@@ -558,7 +558,7 @@ fn callable_sig_from_fn_trait(
                 self.register_obligation(pred);
                 let return_ty = self.normalize_alias_ty(projection);
                 for &fn_x in subtraits {
-                    let fn_x_trait = fn_x.get_id(self.db, krate)?;
+                    let fn_x_trait = fn_x.get_id(lang_items)?;
                     let trait_ref = TraitRef::new(self.interner(), fn_x_trait.into(), args);
                     let pred = Predicate::upcast_from(trait_ref, self.interner());
                     if !self.try_obligation(pred).no_solution() {
@@ -658,7 +658,7 @@ fn short_circuit_trivial_tys(ty: Ty<'_>) -> Option<bool> {
             }
         }
 
-        let Some(sized) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
+        let Some(sized) = self.interner().lang_items().Sized else {
             return false;
         };
         let sized_pred = Predicate::upcast_from(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
index d0d0aa7..18feb0f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
@@ -1,6 +1,6 @@
 //! Functions to detect special lang items
 
-use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
+use hir_def::{AdtId, TraitId, lang_item::LangItems, signatures::StructFlags};
 use intern::{Symbol, sym};
 
 use crate::db::HirDatabase;
@@ -10,48 +10,51 @@ pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool {
     db.struct_signature(id).flags.contains(StructFlags::IS_BOX)
 }
 
-pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Symbol, LangItem)> {
+pub fn lang_items_for_bin_op(
+    lang_items: &LangItems,
+    op: syntax::ast::BinaryOp,
+) -> Option<(Symbol, Option<TraitId>)> {
     use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
     Some(match op {
         BinaryOp::LogicOp(_) => return None,
         BinaryOp::ArithOp(aop) => match aop {
-            ArithOp::Add => (sym::add, LangItem::Add),
-            ArithOp::Mul => (sym::mul, LangItem::Mul),
-            ArithOp::Sub => (sym::sub, LangItem::Sub),
-            ArithOp::Div => (sym::div, LangItem::Div),
-            ArithOp::Rem => (sym::rem, LangItem::Rem),
-            ArithOp::Shl => (sym::shl, LangItem::Shl),
-            ArithOp::Shr => (sym::shr, LangItem::Shr),
-            ArithOp::BitXor => (sym::bitxor, LangItem::BitXor),
-            ArithOp::BitOr => (sym::bitor, LangItem::BitOr),
-            ArithOp::BitAnd => (sym::bitand, LangItem::BitAnd),
+            ArithOp::Add => (sym::add, lang_items.Add),
+            ArithOp::Mul => (sym::mul, lang_items.Mul),
+            ArithOp::Sub => (sym::sub, lang_items.Sub),
+            ArithOp::Div => (sym::div, lang_items.Div),
+            ArithOp::Rem => (sym::rem, lang_items.Rem),
+            ArithOp::Shl => (sym::shl, lang_items.Shl),
+            ArithOp::Shr => (sym::shr, lang_items.Shr),
+            ArithOp::BitXor => (sym::bitxor, lang_items.BitXor),
+            ArithOp::BitOr => (sym::bitor, lang_items.BitOr),
+            ArithOp::BitAnd => (sym::bitand, lang_items.BitAnd),
         },
         BinaryOp::Assignment { op: Some(aop) } => match aop {
-            ArithOp::Add => (sym::add_assign, LangItem::AddAssign),
-            ArithOp::Mul => (sym::mul_assign, LangItem::MulAssign),
-            ArithOp::Sub => (sym::sub_assign, LangItem::SubAssign),
-            ArithOp::Div => (sym::div_assign, LangItem::DivAssign),
-            ArithOp::Rem => (sym::rem_assign, LangItem::RemAssign),
-            ArithOp::Shl => (sym::shl_assign, LangItem::ShlAssign),
-            ArithOp::Shr => (sym::shr_assign, LangItem::ShrAssign),
-            ArithOp::BitXor => (sym::bitxor_assign, LangItem::BitXorAssign),
-            ArithOp::BitOr => (sym::bitor_assign, LangItem::BitOrAssign),
-            ArithOp::BitAnd => (sym::bitand_assign, LangItem::BitAndAssign),
+            ArithOp::Add => (sym::add_assign, lang_items.AddAssign),
+            ArithOp::Mul => (sym::mul_assign, lang_items.MulAssign),
+            ArithOp::Sub => (sym::sub_assign, lang_items.SubAssign),
+            ArithOp::Div => (sym::div_assign, lang_items.DivAssign),
+            ArithOp::Rem => (sym::rem_assign, lang_items.RemAssign),
+            ArithOp::Shl => (sym::shl_assign, lang_items.ShlAssign),
+            ArithOp::Shr => (sym::shr_assign, lang_items.ShrAssign),
+            ArithOp::BitXor => (sym::bitxor_assign, lang_items.BitXorAssign),
+            ArithOp::BitOr => (sym::bitor_assign, lang_items.BitOrAssign),
+            ArithOp::BitAnd => (sym::bitand_assign, lang_items.BitAndAssign),
         },
         BinaryOp::CmpOp(cop) => match cop {
-            CmpOp::Eq { negated: false } => (sym::eq, LangItem::PartialEq),
-            CmpOp::Eq { negated: true } => (sym::ne, LangItem::PartialEq),
+            CmpOp::Eq { negated: false } => (sym::eq, lang_items.PartialEq),
+            CmpOp::Eq { negated: true } => (sym::ne, lang_items.PartialEq),
             CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
-                (sym::le, LangItem::PartialOrd)
+                (sym::le, lang_items.PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
-                (sym::lt, LangItem::PartialOrd)
+                (sym::lt, lang_items.PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
-                (sym::ge, LangItem::PartialOrd)
+                (sym::ge, lang_items.PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
-                (sym::gt, LangItem::PartialOrd)
+                (sym::gt, lang_items.PartialOrd)
             }
         },
         BinaryOp::Assignment { op: None } => return None,
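The hunk above swaps per-operator `LangItem::*` lookups for fields on a precomputed `lang_items` table, so each binary operator resolves to a symbol plus an already-resolved trait id. A minimal sketch of that shape, with hypothetical `LangItemTable`, `TraitId`, and `binop_trait` names standing in for the rust-analyzer types:

```rust
// Illustrative only: the operator match reads a field from a prebuilt table of
// already-resolved lang items instead of resolving a `LangItem` enum variant on
// every lookup. None of these names are the actual rust-analyzer API.
#[derive(Clone, Copy, Debug, PartialEq)]
struct TraitId(u32);

#[derive(Default)]
struct LangItemTable {
    add: Option<TraitId>,
    add_assign: Option<TraitId>,
    partial_eq: Option<TraitId>,
}

enum ArithOp { Add }
enum BinaryOp { Arith(ArithOp), Assign(Option<ArithOp>), Eq { negated: bool } }

fn binop_trait(op: &BinaryOp, lang_items: &LangItemTable) -> Option<(&'static str, TraitId)> {
    let (name, trait_id) = match op {
        BinaryOp::Arith(ArithOp::Add) => ("add", lang_items.add),
        BinaryOp::Assign(Some(ArithOp::Add)) => ("add_assign", lang_items.add_assign),
        BinaryOp::Assign(None) => return None,
        BinaryOp::Eq { negated: false } => ("eq", lang_items.partial_eq),
        BinaryOp::Eq { negated: true } => ("ne", lang_items.partial_eq),
    };
    // The trait may be missing (e.g. in a minimal sysroot), hence the `Option`.
    trait_id.map(|t| (name, t))
}

fn main() {
    let table = LangItemTable { add: Some(TraitId(0)), ..Default::default() };
    assert_eq!(binop_trait(&BinaryOp::Arith(ArithOp::Add), &table), Some(("add", TraitId(0))));
}
```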
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index fc0b9d3..97660a6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -4,6 +4,7 @@
 
 use hir_def::{
     AdtId, LocalFieldId, StructId,
+    attrs::AttrFlags,
     layout::{LayoutCalculatorError, LayoutData},
 };
 use la_arena::{Idx, RawIdx};
@@ -20,7 +21,7 @@
 use triomphe::Arc;
 
 use crate::{
-    TraitEnvironment,
+    InferenceResult, TraitEnvironment,
     consteval::try_const_usize,
     db::HirDatabase,
     next_solver::{
@@ -143,7 +144,7 @@ fn layout_of_simd_ty<'db>(
     let Some(TyKind::Array(e_ty, e_len)) = fields
         .next()
         .filter(|_| fields.next().is_none())
-        .map(|f| (*f.1).instantiate(DbInterner::new_with(db, None, None), args).kind())
+        .map(|f| (*f.1).instantiate(DbInterner::new_no_crate(db), args).kind())
     else {
         return Err(LayoutError::InvalidSimdType);
     };
@@ -161,7 +162,7 @@ pub fn layout_of_ty_query<'db>(
     trait_env: Arc<TraitEnvironment<'db>>,
 ) -> Result<Arc<Layout>, LayoutError> {
     let krate = trait_env.krate;
-    let interner = DbInterner::new_with(db, Some(krate), trait_env.block);
+    let interner = DbInterner::new_with(db, krate);
     let Ok(target) = db.target_data_layout(krate) else {
         return Err(LayoutError::TargetLayoutNotAvailable);
     };
@@ -174,8 +175,7 @@ pub fn layout_of_ty_query<'db>(
         TyKind::Adt(def, args) => {
             match def.inner().id {
                 hir_def::AdtId::StructId(s) => {
-                    let data = db.struct_signature(s);
-                    let repr = data.repr.unwrap_or_default();
+                    let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
                     if repr.simd() {
                         return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
                     }
@@ -322,7 +322,7 @@ pub fn layout_of_ty_query<'db>(
         }
         TyKind::Closure(id, args) => {
             let def = db.lookup_intern_closure(id.0);
-            let infer = db.infer(def.0);
+            let infer = InferenceResult::for_body(db, def.0);
             let (captures, _) = infer.closure_info(id.0);
             let fields = captures
                 .iter()
@@ -401,7 +401,7 @@ fn field_ty<'a>(
     fd: LocalFieldId,
     args: &GenericArgs<'a>,
 ) -> Ty<'a> {
-    db.field_types(def)[fd].instantiate(DbInterner::new_with(db, None, None), args)
+    db.field_types(def)[fd].instantiate(DbInterner::new_no_crate(db), args)
 }
 
 fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index a8f04bf..ecebf79 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -4,9 +4,9 @@
 
 use hir_def::{
     AdtId, VariantId,
+    attrs::AttrFlags,
     signatures::{StructFlags, VariantFields},
 };
-use intern::sym;
 use rustc_abi::{Integer, ReprOptions, TargetDataLayout};
 use rustc_index::IndexVec;
 use smallvec::SmallVec;
@@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>(
             r.push(handle_variant(s.into(), s.fields(db))?);
             (
                 r,
-                sig.repr.unwrap_or_default(),
+                AttrFlags::repr(db, s.into()).unwrap_or_default(),
                 sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED),
             )
         }
         AdtId::UnionId(id) => {
-            let data = db.union_signature(id);
+            let repr = AttrFlags::repr(db, id.into());
             let mut r = SmallVec::new();
             r.push(handle_variant(id.into(), id.fields(db))?);
-            (r, data.repr.unwrap_or_default(), false)
+            (r, repr.unwrap_or_default(), false)
         }
         AdtId::EnumId(e) => {
             let variants = e.enum_variants(db);
@@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>(
                 .iter()
                 .map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
                 .collect::<Result<SmallVec<_>, _>>()?;
-            (r, db.enum_signature(e).repr.unwrap_or_default(), false)
+            (r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false)
         }
     };
     let variants = variants
@@ -105,27 +105,12 @@ pub(crate) fn layout_of_adt_cycle_result<'db>(
 }
 
 fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
-    let attrs = db.attrs(def.into());
-    let get = |name| {
-        let attr = attrs.by_key(name).tt_values();
-        for tree in attr {
-            if let Some(it) = tree.iter().next_as_view() {
-                let text = it.to_string().replace('_', "");
-                let (text, base) = match text.as_bytes() {
-                    [b'0', b'x', ..] => (&text[2..], 16),
-                    [b'0', b'o', ..] => (&text[2..], 8),
-                    [b'0', b'b', ..] => (&text[2..], 2),
-                    _ => (&*text, 10),
-                };
-
-                if let Ok(it) = u128::from_str_radix(text, base) {
-                    return Bound::Included(it);
-                }
-            }
-        }
-        Bound::Unbounded
+    let range = AttrFlags::rustc_layout_scalar_valid_range(db, def);
+    let get = |value| match value {
+        Some(it) => Bound::Included(it),
+        None => Bound::Unbounded,
     };
-    (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end))
+    (get(range.start), get(range.end))
 }
 
 /// Finds the appropriate Integer type and signedness for the given
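The removed closure in `layout_scalar_valid_range` parsed the `#[rustc_layout_scalar_valid_range_start]`/`_end` literal by hand; the patch replaces it with the precomputed `AttrFlags::rustc_layout_scalar_valid_range` query. For reference, the old literal handling extracted into a standalone, purely illustrative function:

```rust
// Standalone version of the integer-literal parsing the removed closure did:
// strip `_` separators, detect a radix prefix, and parse with
// `u128::from_str_radix`. This only restates the old inline logic; it is not
// part of the patch.
fn parse_valid_range_literal(text: &str) -> Option<u128> {
    let text = text.replace('_', "");
    let (digits, base) = match text.as_bytes() {
        [b'0', b'x', ..] => (&text[2..], 16),
        [b'0', b'o', ..] => (&text[2..], 8),
        [b'0', b'b', ..] => (&text[2..], 2),
        _ => (&*text, 10),
    };
    u128::from_str_radix(digits, base).ok()
}

fn main() {
    assert_eq!(parse_valid_range_literal("1"), Some(1));
    assert_eq!(parse_valid_range_literal("0xFF_FF"), Some(0xFFFF));
    assert_eq!(parse_valid_range_literal("0b1010"), Some(10));
    assert_eq!(parse_valid_range_literal("oops"), None);
}
```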
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index f0fed83..878813a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -9,6 +9,7 @@
 use triomphe::Arc;
 
 use crate::{
+    InferenceResult,
     db::HirDatabase,
     layout::{Layout, LayoutError},
     next_solver::{DbInterner, GenericArgs},
@@ -80,7 +81,7 @@ fn eval_goal(
         })
         .unwrap();
     crate::attach_db(&db, || {
-        let interner = DbInterner::new_with(&db, None, None);
+        let interner = DbInterner::new_no_crate(&db);
         let goal_ty = match adt_or_type_alias_id {
             Either::Left(adt_id) => crate::next_solver::Ty::new_adt(
                 interner,
@@ -136,7 +137,7 @@ fn eval_expr(
             .find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
             .unwrap()
             .0;
-        let infer = db.infer(function_id.into());
+        let infer = InferenceResult::for_body(&db, function_id.into());
         let goal_ty = infer.type_of_binding[b];
         db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
     })
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 8819307..6d3adec 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -477,14 +477,14 @@ pub fn callable_sig_from_fn_trait<'db>(
     trait_env: Arc<TraitEnvironment<'db>>,
     db: &'db dyn HirDatabase,
 ) -> Option<(FnTrait, PolyFnSig<'db>)> {
-    let krate = trait_env.krate;
-    let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
+    let mut table = InferenceTable::new(db, trait_env.clone(), None);
+    let lang_items = table.interner().lang_items();
+
+    let fn_once_trait = FnTrait::FnOnce.get_id(lang_items)?;
     let output_assoc_type = fn_once_trait
         .trait_items(db)
         .associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
 
-    let mut table = InferenceTable::new(db, trait_env.clone(), None);
-
     // Register two obligations:
     // - Self: FnOnce<?args_ty>
     // - <Self as FnOnce<?args_ty>>::Output == ?ret_ty
@@ -502,7 +502,7 @@ pub fn callable_sig_from_fn_trait<'db>(
         table.register_obligation(pred);
         let return_ty = table.normalize_alias_ty(projection);
         for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
-            let fn_x_trait = fn_x.get_id(db, krate)?;
+            let fn_x_trait = fn_x.get_id(lang_items)?;
             let trait_ref = TraitRef::new(table.interner(), fn_x_trait.into(), args);
             if !table
                 .try_obligation(Predicate::upcast_from(trait_ref, table.interner()))
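`callable_sig_from_fn_trait` now pulls `FnOnce`/`FnMut`/`Fn` from the interner's lang-item table before registering its two obligations. The probing loop relies on the standard `Fn: FnMut: FnOnce` hierarchy, with `Output` defined on `FnOnce`; a small, self-contained illustration of those two facts in plain std Rust, with nothing rust-analyzer specific:

```rust
fn requires_fn_once<F: FnOnce() -> u32>(f: F) -> u32 {
    // `Output` is the associated type on `FnOnce` that the second obligation constrains.
    f()
}

fn requires_fn_mut<F: FnMut() -> u32>(mut f: F) -> u32 {
    f() + f()
}

fn main() {
    // A non-capturing closure implements `Fn`, so it also satisfies the weaker
    // `FnMut` and `FnOnce` bounds through the supertrait chain; the probing loop
    // reports the strongest of the three traits that holds.
    let c = || 21u32;
    assert_eq!(requires_fn_mut(c), 42);
    assert_eq!(requires_fn_once(c), 21);
}
```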
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 3f187d2..cfd2a06 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -11,7 +11,6 @@
 use std::{cell::OnceCell, iter, mem};
 
 use arrayvec::ArrayVec;
-use base_db::Crate;
 use either::Either;
 use hir_def::{
     AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
@@ -24,7 +23,7 @@
         GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
     },
     item_tree::FieldsShape,
-    lang_item::LangItem,
+    lang_item::LangItems,
     resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs},
     signatures::{FunctionSignature, TraitFlags, TypeAliasFlags},
     type_ref::{
@@ -40,14 +39,17 @@
 use rustc_pattern_analysis::Captures;
 use rustc_type_ir::{
     AliasTyKind, BoundVarIndexKind, ConstKind, DebruijnIndex, ExistentialPredicate,
-    ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate,
+    ExistentialProjection, ExistentialTraitRef, FnSig, Interner, OutlivesPredicate, TermKind,
     TyKind::{self},
-    TypeVisitableExt, Upcast,
-    inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
+    TypeFoldable, TypeVisitableExt, Upcast, UpcastFrom, elaborate,
+    inherent::{
+        Clause as _, GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike,
+        Ty as _,
+    },
 };
-use salsa::plumbing::AsId;
 use smallvec::{SmallVec, smallvec};
 use stdx::{impl_from, never};
+use tracing::debug;
 use triomphe::{Arc, ThinArc};
 
 use crate::{
@@ -57,9 +59,9 @@
     generics::{Generics, generics, trait_self_param_idx},
     next_solver::{
         AliasTy, Binder, BoundExistentialPredicates, Clause, ClauseKind, Clauses, Const,
-        DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs,
-        ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef,
-        Ty, Tys, UnevaluatedConst, abi::Safety,
+        DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, FxIndexMap, GenericArg,
+        GenericArgs, ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId,
+        TraitPredicate, TraitRef, Ty, Tys, UnevaluatedConst, abi::Safety, util::BottomUpFolder,
     },
 };
 
@@ -166,6 +168,7 @@ pub(crate) fn for_fn_ret(interner: DbInterner<'db>) -> LifetimeElisionKind<'db>
 pub struct TyLoweringContext<'db, 'a> {
     pub db: &'db dyn HirDatabase,
     interner: DbInterner<'db>,
+    lang_items: &'db LangItems,
     resolver: &'a Resolver<'db>,
     store: &'a ExpressionStore,
     def: GenericDefId,
@@ -191,9 +194,12 @@ pub fn new(
     ) -> Self {
         let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed);
         let in_binders = DebruijnIndex::ZERO;
+        let interner = DbInterner::new_with(db, resolver.krate());
         Self {
             db,
-            interner: DbInterner::new_with(db, Some(resolver.krate()), None),
+            // Can provide no block since we don't use it for trait solving.
+            interner,
+            lang_items: interner.lang_items(),
             resolver,
             def,
             generics: Default::default(),
@@ -490,7 +496,7 @@ pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty<'db>, Option<TypeN
                         // away instead of two.
                         let actual_opaque_type_data = self
                             .with_debruijn(DebruijnIndex::ZERO, |ctx| {
-                                ctx.lower_impl_trait(opaque_ty_id, bounds, self.resolver.krate())
+                                ctx.lower_impl_trait(opaque_ty_id, bounds)
                             });
                         self.impl_trait_mode.opaque_type_data[idx] = actual_opaque_type_data;
 
@@ -658,6 +664,8 @@ pub(crate) fn lower_type_bound<'b>(
         ignore_bindings: bool,
     ) -> impl Iterator<Item = Clause<'db>> + use<'b, 'a, 'db> {
         let interner = self.interner;
+        let meta_sized = self.lang_items.MetaSized;
+        let pointee_sized = self.lang_items.PointeeSized;
         let mut assoc_bounds = None;
         let mut clause = None;
         match bound {
@@ -666,10 +674,6 @@ pub(crate) fn lower_type_bound<'b>(
                 if let Some((trait_ref, mut ctx)) = self.lower_trait_ref_from_path(path, self_ty) {
                     // FIXME(sized-hierarchy): Remove this bound modifications once we have implemented
                     // sized-hierarchy correctly.
-                    let meta_sized = LangItem::MetaSized
-                        .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
-                    let pointee_sized = LangItem::PointeeSized
-                        .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
                     if meta_sized.is_some_and(|it| it == trait_ref.def_id.0) {
                         // Ignore this bound
                     } else if pointee_sized.is_some_and(|it| it == trait_ref.def_id.0) {
@@ -692,7 +696,7 @@ pub(crate) fn lower_type_bound<'b>(
                 }
             }
             &TypeBound::Path(path, TraitBoundModifier::Maybe) => {
-                let sized_trait = LangItem::Sized.resolve_trait(self.db, self.resolver.krate());
+                let sized_trait = self.lang_items.Sized;
                 // Don't lower associated type bindings, as the only possible relaxed trait bound
                 // `?Sized` has none of them.
                 // If we got another trait here, ignore the bound completely.
@@ -721,138 +725,250 @@ pub(crate) fn lower_type_bound<'b>(
 
     fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> {
         let interner = self.interner;
-        // FIXME: we should never create non-existential predicates in the first place
-        // For now, use an error type so we don't run into dummy binder issues
-        let self_ty = Ty::new_error(interner, ErrorGuaranteed);
+        let dummy_self_ty = dyn_trait_dummy_self(interner);
+        let mut region = None;
         // INVARIANT: The principal trait bound, if present, must come first. The other bounds may
         // be in any order, but the same set of bounds must produce the same order, even if they
         // were written in a different order in the input.
         // INVARIANT: If this function returns `DynTy`, there should be at least one trait bound.
         // These invariants are utilized by `TyExt::dyn_trait()` and chalk.
-        let mut lifetime = None;
         let bounds = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| {
-            let mut lowered_bounds: Vec<
-                rustc_type_ir::Binder<DbInterner<'db>, ExistentialPredicate<DbInterner<'db>>>,
-            > = Vec::new();
+            let mut principal = None;
+            let mut auto_traits = SmallVec::<[_; 3]>::new();
+            let mut projections = Vec::new();
+            let mut had_error = false;
+
             for b in bounds {
                 let db = ctx.db;
-                ctx.lower_type_bound(b, self_ty, false).for_each(|b| {
-                    if let Some(bound) = b
-                        .kind()
-                        .map_bound(|c| match c {
-                            rustc_type_ir::ClauseKind::Trait(t) => {
-                                let id = t.def_id();
-                                let is_auto =
-                                    db.trait_signature(id.0).flags.contains(TraitFlags::AUTO);
-                                if is_auto {
-                                    Some(ExistentialPredicate::AutoTrait(t.def_id()))
-                                } else {
-                                    Some(ExistentialPredicate::Trait(
-                                        ExistentialTraitRef::new_from_args(
-                                            interner,
-                                            t.def_id(),
-                                            GenericArgs::new_from_iter(
-                                                interner,
-                                                t.trait_ref.args.iter().skip(1),
-                                            ),
-                                        ),
-                                    ))
+                ctx.lower_type_bound(b, dummy_self_ty, false).for_each(|b| {
+                    match b.kind().skip_binder() {
+                        rustc_type_ir::ClauseKind::Trait(t) => {
+                            let id = t.def_id();
+                            let is_auto = db.trait_signature(id.0).flags.contains(TraitFlags::AUTO);
+                            if is_auto {
+                                auto_traits.push(t.def_id().0);
+                            } else {
+                                if principal.is_some() {
+                                    // FIXME: Report an error.
+                                    had_error = true;
                                 }
+                                principal = Some(b.kind().rebind(t.trait_ref));
                             }
-                            rustc_type_ir::ClauseKind::Projection(p) => {
-                                Some(ExistentialPredicate::Projection(
-                                    ExistentialProjection::new_from_args(
-                                        interner,
-                                        p.def_id(),
-                                        GenericArgs::new_from_iter(
-                                            interner,
-                                            p.projection_term.args.iter().skip(1),
-                                        ),
-                                        p.term,
-                                    ),
-                                ))
+                        }
+                        rustc_type_ir::ClauseKind::Projection(p) => {
+                            projections.push(b.kind().rebind(p));
+                        }
+                        rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => {
+                            if region.is_some() {
+                                // FIXME: Report an error.
+                                had_error = true;
                             }
-                            rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => {
-                                lifetime = Some(outlives_predicate.1);
-                                None
-                            }
-                            rustc_type_ir::ClauseKind::RegionOutlives(_)
-                            | rustc_type_ir::ClauseKind::ConstArgHasType(_, _)
-                            | rustc_type_ir::ClauseKind::WellFormed(_)
-                            | rustc_type_ir::ClauseKind::ConstEvaluatable(_)
-                            | rustc_type_ir::ClauseKind::HostEffect(_)
-                            | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(),
-                        })
-                        .transpose()
-                    {
-                        lowered_bounds.push(bound);
+                            region = Some(outlives_predicate.1);
+                        }
+                        rustc_type_ir::ClauseKind::RegionOutlives(_)
+                        | rustc_type_ir::ClauseKind::ConstArgHasType(_, _)
+                        | rustc_type_ir::ClauseKind::WellFormed(_)
+                        | rustc_type_ir::ClauseKind::ConstEvaluatable(_)
+                        | rustc_type_ir::ClauseKind::HostEffect(_)
+                        | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(),
                     }
                 })
             }
 
-            let mut multiple_regular_traits = false;
-            let mut multiple_same_projection = false;
-            lowered_bounds.sort_unstable_by(|lhs, rhs| {
-                use std::cmp::Ordering;
-                match ((*lhs).skip_binder(), (*rhs).skip_binder()) {
-                    (ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => {
-                        multiple_regular_traits = true;
-                        // Order doesn't matter - we error
-                        Ordering::Equal
-                    }
-                    (
-                        ExistentialPredicate::AutoTrait(lhs_id),
-                        ExistentialPredicate::AutoTrait(rhs_id),
-                    ) => lhs_id.0.cmp(&rhs_id.0),
-                    (ExistentialPredicate::Trait(_), _) => Ordering::Less,
-                    (_, ExistentialPredicate::Trait(_)) => Ordering::Greater,
-                    (ExistentialPredicate::AutoTrait(_), _) => Ordering::Less,
-                    (_, ExistentialPredicate::AutoTrait(_)) => Ordering::Greater,
-                    (
-                        ExistentialPredicate::Projection(lhs),
-                        ExistentialPredicate::Projection(rhs),
-                    ) => {
-                        let lhs_id = match lhs.def_id {
-                            SolverDefId::TypeAliasId(id) => id,
-                            _ => unreachable!(),
-                        };
-                        let rhs_id = match rhs.def_id {
-                            SolverDefId::TypeAliasId(id) => id,
-                            _ => unreachable!(),
-                        };
-                        // We only compare the `associated_ty_id`s. We shouldn't have
-                        // multiple bounds for an associated type in the correct Rust code,
-                        // and if we do, we error out.
-                        if lhs_id == rhs_id {
-                            multiple_same_projection = true;
+            if had_error {
+                return None;
+            }
+
+            if principal.is_none() && auto_traits.is_empty() {
+                // A trait object with no trait bounds at all is not allowed.
+                return None;
+            }
+
+            // `Send + Sync` is the same as `Sync + Send`.
+            auto_traits.sort_unstable();
+            // Duplicate auto traits are permitted in the source, so deduplicate instead of erroring.
+            auto_traits.dedup();
+
+            // Map the projection bounds onto a key that makes it easy to remove redundant
+            // bounds that are constrained by supertraits of the principal def id.
+            //
+            // Also make sure we detect conflicting bounds from expanding a trait alias and
+            // also specifying it manually, like:
+            // ```
+            // type Alias = Trait<Assoc = i32>;
+            // let _: &dyn Alias<Assoc = u32> = /* ... */;
+            // ```
+            let mut projection_bounds = FxIndexMap::default();
+            for proj in projections {
+                let key = (
+                    proj.skip_binder().def_id().expect_type_alias(),
+                    interner.anonymize_bound_vars(
+                        proj.map_bound(|proj| proj.projection_term.trait_ref(interner)),
+                    ),
+                );
+                if let Some(old_proj) = projection_bounds.insert(key, proj)
+                    && interner.anonymize_bound_vars(proj)
+                        != interner.anonymize_bound_vars(old_proj)
+                {
+                    // FIXME: Report "conflicting associated type" error.
+                }
+            }
+
+            // A stable ordering of associated types from the principal trait and all its
+            // supertraits. We use this to ensure that different substitutions of a trait
+            // don't result in `dyn Trait` types with different projections lists, which
+            // can be unsound: <https://github.com/rust-lang/rust/pull/136458>.
+            // We achieve a stable ordering by walking over the unsubstituted principal
+            // trait ref.
+            let mut ordered_associated_types = vec![];
+
+            if let Some(principal_trait) = principal {
+                for clause in elaborate::elaborate(
+                    interner,
+                    [Clause::upcast_from(
+                        TraitRef::identity(interner, principal_trait.def_id()),
+                        interner,
+                    )],
+                )
+                .filter_only_self()
+                {
+                    let clause = clause.instantiate_supertrait(interner, principal_trait);
+                    debug!("observing object predicate `{clause:?}`");
+
+                    let bound_predicate = clause.kind();
+                    match bound_predicate.skip_binder() {
+                        ClauseKind::Trait(pred) => {
+                            // FIXME(negative_bounds): Handle this correctly...
+                            let trait_ref = interner
+                                .anonymize_bound_vars(bound_predicate.rebind(pred.trait_ref));
+                            ordered_associated_types.extend(
+                                pred.trait_ref
+                                    .def_id
+                                    .0
+                                    .trait_items(self.db)
+                                    .associated_types()
+                                    .map(|item| (item, trait_ref)),
+                            );
                         }
-                        lhs_id.as_id().index().cmp(&rhs_id.as_id().index())
+                        ClauseKind::Projection(pred) => {
+                            let pred = bound_predicate.rebind(pred);
+                            // A `Self` within the original bound will be instantiated with a
+                            // `trait_object_dummy_self`, so check for that.
+                            let references_self = match pred.skip_binder().term.kind() {
+                                TermKind::Ty(ty) => {
+                                    ty.walk().any(|arg| arg == dummy_self_ty.into())
+                                }
+                                // FIXME(associated_const_equality): We should walk the const instead of not doing anything
+                                TermKind::Const(_) => false,
+                            };
+
+                            // If the projection output contains `Self`, force the user to
+                            // elaborate it explicitly to avoid a lot of complexity.
+                            //
+                            // The "classically useful" case is the following:
+                            // ```
+                            //     trait MyTrait: FnMut() -> <Self as MyTrait>::MyOutput {
+                            //         type MyOutput;
+                            //     }
+                            // ```
+                            //
+                            // Here, the user could theoretically write `dyn MyTrait<MyOutput = X>`,
+                            // but actually supporting that would "expand" to an infinitely-long type
+                            // `fix $ τ → dyn MyTrait<MyOutput = X, Output = <τ as MyTrait>::MyOutput`.
+                            //
+                            // Instead, we force the user to write
+                            // `dyn MyTrait<MyOutput = X, Output = X>`, which is uglier but works. See
+                            // the discussion in #56288 for alternatives.
+                            if !references_self {
+                                let key = (
+                                    pred.skip_binder().projection_term.def_id.expect_type_alias(),
+                                    interner.anonymize_bound_vars(pred.map_bound(|proj| {
+                                        proj.projection_term.trait_ref(interner)
+                                    })),
+                                );
+                                if !projection_bounds.contains_key(&key) {
+                                    projection_bounds.insert(key, pred);
+                                }
+                            }
+                        }
+                        _ => (),
                     }
                 }
+            }
+
+            // We compute the list of projection bounds by walking the ordered associated types
+            // and looking each one up in the collected `projection_bounds`. That map was filled
+            // with the user-written associated type bounds first, then with bounds elaborated
+            // from the principal trait ref, which are only used when there is no user-written one.
+            // See the note below about how we handle missing associated types with `Self: Sized`,
+            // which are not required to be provided but are still used if they are provided.
+            let mut projection_bounds: Vec<_> = ordered_associated_types
+                .into_iter()
+                .filter_map(|key| projection_bounds.get(&key).copied())
+                .collect();
+
+            projection_bounds.sort_unstable_by_key(|proj| proj.skip_binder().def_id());
+
+            let principal = principal.map(|principal| {
+                principal.map_bound(|principal| {
+                    // Verify that `dummy_self` did not leak inside default type parameters.
+                    let args: Vec<_> = principal
+                        .args
+                        .iter()
+                        // Skip `Self`
+                        .skip(1)
+                        .map(|arg| {
+                            if arg.walk().any(|arg| arg == dummy_self_ty.into()) {
+                                // FIXME: Report an error.
+                                Ty::new_error(interner, ErrorGuaranteed).into()
+                            } else {
+                                arg
+                            }
+                        })
+                        .collect();
+
+                    ExistentialPredicate::Trait(ExistentialTraitRef::new(
+                        interner,
+                        principal.def_id,
+                        args,
+                    ))
+                })
             });
 
-            if multiple_regular_traits || multiple_same_projection {
-                return None;
-            }
+            let projections = projection_bounds.into_iter().map(|proj| {
+                proj.map_bound(|mut proj| {
+                    // Like for trait refs, verify that `dummy_self` did not leak inside default type
+                    // parameters.
+                    let references_self = proj.projection_term.args.iter().skip(1).any(|arg| {
+                        if arg.walk().any(|arg| arg == dummy_self_ty.into()) {
+                            return true;
+                        }
+                        false
+                    });
+                    if references_self {
+                        proj.projection_term =
+                            replace_dummy_self_with_error(interner, proj.projection_term);
+                    }
 
-            if !lowered_bounds.first().map_or(false, |b| {
-                matches!(
-                    b.as_ref().skip_binder(),
-                    ExistentialPredicate::Trait(_) | ExistentialPredicate::AutoTrait(_)
-                )
-            }) {
-                return None;
-            }
+                    ExistentialPredicate::Projection(ExistentialProjection::erase_self_ty(
+                        interner, proj,
+                    ))
+                })
+            });
 
-            // As multiple occurrences of the same auto traits *are* permitted, we deduplicate the
-            // bounds. We shouldn't have repeated elements besides auto traits at this point.
-            lowered_bounds.dedup();
+            let auto_traits = auto_traits.into_iter().map(|auto_trait| {
+                Binder::dummy(ExistentialPredicate::AutoTrait(auto_trait.into()))
+            });
 
-            Some(BoundExistentialPredicates::new_from_iter(interner, lowered_bounds))
+            // N.B.: the order is principal, then projections, then auto traits.
+            Some(BoundExistentialPredicates::new_from_iter(
+                interner,
+                principal.into_iter().chain(projections).chain(auto_traits),
+            ))
         });
 
         if let Some(bounds) = bounds {
-            let region = match lifetime {
+            let region = match region {
                 Some(it) => match it.kind() {
                     rustc_type_ir::RegionKind::ReBound(BoundVarIndexKind::Bound(db), var) => {
                         Region::new_bound(
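The rewritten `lower_dyn_trait` assembles the existential predicates in a fixed order (principal first, then projections, then auto traits) and sorts and deduplicates the auto traits, so `dyn Trait + Send + Sync` and `dyn Trait + Sync + Send` lower identically. A simplified sketch of that normalization, using an illustrative `Pred` enum rather than the real `ExistentialPredicate` machinery:

```rust
// `Pred` and `normalize_dyn_bounds` are stand-ins for illustration only.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum Pred {
    Trait(&'static str),
    Projection(&'static str, &'static str),
    AutoTrait(&'static str),
}

fn normalize_dyn_bounds(
    principal: Option<&'static str>,
    projections: Vec<(&'static str, &'static str)>,
    mut auto_traits: Vec<&'static str>,
) -> Option<Vec<Pred>> {
    if principal.is_none() && auto_traits.is_empty() {
        return None; // a trait object needs at least one trait bound
    }
    // Sort then dedup so the same set of auto traits always yields the same list.
    auto_traits.sort_unstable();
    auto_traits.dedup();
    let mut out = Vec::new();
    out.extend(principal.map(Pred::Trait));
    out.extend(projections.into_iter().map(|(assoc, ty)| Pred::Projection(assoc, ty)));
    out.extend(auto_traits.into_iter().map(Pred::AutoTrait));
    Some(out)
}

fn main() {
    let a = normalize_dyn_bounds(Some("Iterator"), vec![("Item", "u32")], vec!["Sync", "Send"]);
    let b = normalize_dyn_bounds(Some("Iterator"), vec![("Item", "u32")], vec!["Send", "Sync", "Send"]);
    assert_eq!(a, b);
    assert_eq!(normalize_dyn_bounds(None, vec![], vec![]), None);
}
```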
@@ -873,12 +989,7 @@ fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> {
         }
     }
 
-    fn lower_impl_trait(
-        &mut self,
-        def_id: SolverDefId,
-        bounds: &[TypeBound],
-        krate: Crate,
-    ) -> ImplTrait<'db> {
+    fn lower_impl_trait(&mut self, def_id: SolverDefId, bounds: &[TypeBound]) -> ImplTrait<'db> {
         let interner = self.interner;
         cov_mark::hit!(lower_rpit);
         let args = GenericArgs::identity_for_item(interner, def_id);
@@ -894,7 +1005,7 @@ fn lower_impl_trait(
             }
 
             if !ctx.unsized_types.contains(&self_ty) {
-                let sized_trait = LangItem::Sized.resolve_trait(self.db, krate);
+                let sized_trait = self.lang_items.Sized;
                 let sized_clause = sized_trait.map(|trait_id| {
                     let trait_ref = TraitRef::new_from_args(
                         interner,
@@ -935,6 +1046,26 @@ pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db>
     }
 }
 
+fn dyn_trait_dummy_self(interner: DbInterner<'_>) -> Ty<'_> {
+    // This fresh type is only a placeholder for `Self` while lowering `dyn Trait` bounds;
+    // it must not be created anywhere else, so comparisons against it stay unambiguous.
+    Ty::new_fresh(interner, 0)
+}
+
+fn replace_dummy_self_with_error<'db, T: TypeFoldable<DbInterner<'db>>>(
+    interner: DbInterner<'db>,
+    t: T,
+) -> T {
+    let dyn_trait_dummy_self = dyn_trait_dummy_self(interner);
+    t.fold_with(&mut BottomUpFolder {
+        interner,
+        ty_op: |ty| {
+            if ty == dyn_trait_dummy_self { Ty::new_error(interner, ErrorGuaranteed) } else { ty }
+        },
+        lt_op: |lt| lt,
+        ct_op: |ct| ct,
+    })
+}
+
 pub(crate) fn lower_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
     match m {
         hir_def::type_ref::Mutability::Shared => Mutability::Not,
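`replace_dummy_self_with_error` uses `BottomUpFolder` to swap any leaked dummy `Self` placeholder for an error type. A toy version of the same idea over a hand-rolled recursive type, shown only to make the shape of the fold concrete:

```rust
// Toy stand-in: walk a type bottom-up and replace every occurrence of the
// dummy `Self` placeholder with an error type. `Ty` here is illustrative,
// not the real `rustc_type_ir` machinery.
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    DummySelf,
    Error,
    Ref(Box<Ty>),
    Tuple(Vec<Ty>),
    Named(&'static str),
}

fn replace_dummy_self(ty: Ty) -> Ty {
    match ty {
        Ty::DummySelf => Ty::Error,
        Ty::Ref(inner) => Ty::Ref(Box::new(replace_dummy_self(*inner))),
        Ty::Tuple(items) => Ty::Tuple(items.into_iter().map(replace_dummy_self).collect()),
        other => other,
    }
}

fn main() {
    let leaked = Ty::Tuple(vec![Ty::Named("u32"), Ty::Ref(Box::new(Ty::DummySelf))]);
    assert_eq!(
        replace_dummy_self(leaked),
        Ty::Tuple(vec![Ty::Named("u32"), Ty::Ref(Box::new(Ty::Error))])
    );
}
```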
@@ -1101,7 +1232,7 @@ pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> GenericDefId {
 /// the constructor function `(usize) -> Foo` which lives in the values
 /// namespace.
 pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBinder<'db, Ty<'db>> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     match def {
         TyDefId::BuiltinType(it) => EarlyBinder::bind(Ty::from_builtin_type(interner, it)),
         TyDefId::AdtId(it) => EarlyBinder::bind(Ty::new_adt(
@@ -1116,7 +1247,7 @@ pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBind
 /// Build the declared type of a function. This should not need to look at the
 /// function body.
 fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     EarlyBinder::bind(Ty::new_fn_def(
         interner,
         CallableDefId::FunctionId(def).into(),
@@ -1165,7 +1296,7 @@ fn type_for_struct_constructor<'db>(
         FieldsShape::Record => None,
         FieldsShape::Unit => Some(type_for_adt(db, def.into())),
         FieldsShape::Tuple => {
-            let interner = DbInterner::new_with(db, None, None);
+            let interner = DbInterner::new_no_crate(db);
             Some(EarlyBinder::bind(Ty::new_fn_def(
                 interner,
                 CallableDefId::StructId(def).into(),
@@ -1185,7 +1316,7 @@ fn type_for_enum_variant_constructor<'db>(
         FieldsShape::Record => None,
         FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())),
         FieldsShape::Tuple => {
-            let interner = DbInterner::new_with(db, None, None);
+            let interner = DbInterner::new_no_crate(db);
             Some(EarlyBinder::bind(Ty::new_fn_def(
                 interner,
                 CallableDefId::EnumVariantId(def).into(),
@@ -1216,7 +1347,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>(
     let type_alias_data = db.type_alias_signature(t);
     let mut diags = None;
     let resolver = t.resolver(db);
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let interner = DbInterner::new_no_crate(db);
     let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) {
         EarlyBinder::bind(Ty::new_foreign(interner, t.into()))
     } else {
@@ -1244,7 +1375,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>(
     db: &'db dyn HirDatabase,
     _adt: TypeAliasId,
 ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) {
-    (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None)
+    (EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None)
 }
 
 pub(crate) fn impl_self_ty_query<'db>(
@@ -1277,7 +1408,7 @@ pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
     db: &dyn HirDatabase,
     _impl_id: ImplId,
 ) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) {
-    (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None)
+    (EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None)
 }
 
 pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> {
@@ -1292,7 +1423,7 @@ pub(crate) fn const_param_ty_with_diagnostics_query<'db>(
     let (parent_data, store) = db.generic_params_and_store(def.parent());
     let data = &parent_data[def.local_id()];
     let resolver = def.parent().resolver(db);
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let interner = DbInterner::new_no_crate(db);
     let mut ctx = TyLoweringContext::new(
         db,
         &resolver,
@@ -1313,10 +1444,9 @@ pub(crate) fn const_param_ty_with_diagnostics_query<'db>(
 pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>(
     db: &'db dyn HirDatabase,
     _: crate::db::HirDatabaseData,
-    def: ConstParamId,
+    _def: ConstParamId,
 ) -> (Ty<'db>, Diagnostics) {
-    let resolver = def.parent().resolver(db);
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let interner = DbInterner::new_no_crate(db);
     (Ty::new_error(interner, ErrorGuaranteed), None)
 }
 
@@ -1374,7 +1504,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
     assoc_name: Option<Name>,
 ) -> EarlyBinder<'db, Box<[Clause<'db>]>> {
     let generics = generics(db, def);
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let resolver = def.resolver(db);
     let mut ctx = TyLoweringContext::new(
         db,
@@ -1401,9 +1531,7 @@ pub(crate) fn generic_predicates_for_param<'db>(
                             let TypeRef::Path(path) = &ctx.store[path.type_ref()] else {
                                 return false;
                             };
-                            let Some(pointee_sized) =
-                                LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate())
-                            else {
+                            let Some(pointee_sized) = ctx.lang_items.PointeeSized else {
                                 return false;
                             };
                             // Lower the path directly with `Resolver` instead of PathLoweringContext`
@@ -1466,9 +1594,13 @@ pub(crate) fn generic_predicates_for_param<'db>(
     let args = GenericArgs::identity_for_item(interner, def.into());
     if !args.is_empty() {
         let explicitly_unsized_tys = ctx.unsized_types;
-        if let Some(implicitly_sized_predicates) =
-            implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &args, &resolver)
-        {
+        if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(
+            db,
+            ctx.lang_items,
+            param_id.parent,
+            &explicitly_unsized_tys,
+            &args,
+        ) {
             predicates.extend(implicitly_sized_predicates);
         };
     }
@@ -1520,8 +1652,7 @@ pub fn type_alias_bounds_with_diagnostics<'db>(
     }
 
     if !ctx.unsized_types.contains(&interner_ty) {
-        let sized_trait = LangItem::Sized
-            .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate"));
+        let sized_trait = ctx.lang_items.Sized;
         if let Some(sized_trait) = sized_trait {
             let trait_ref = TraitRef::new_from_args(
                 interner,
@@ -1625,7 +1756,7 @@ pub(crate) fn trait_environment_query<'db>(
     def: GenericDefId,
 ) -> Arc<TraitEnvironment<'db>> {
     let module = def.module(db);
-    let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+    let interner = DbInterner::new_with(db, module.krate());
     let predicates = GenericPredicates::query_all(db, def);
     let traits_in_scope = predicates
         .iter_identity_copied()
@@ -1663,7 +1794,7 @@ pub(crate) fn generic_predicates_filtered_by<'db, F>(
 {
     let generics = generics(db, def);
     let resolver = def.resolver(db);
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let interner = DbInterner::new_no_crate(db);
     let mut ctx = TyLoweringContext::new(
         db,
         &resolver,
@@ -1671,7 +1802,7 @@ pub(crate) fn generic_predicates_filtered_by<'db, F>(
         def,
         LifetimeElisionKind::AnonymousReportError,
     );
-    let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
+    let sized_trait = ctx.lang_items.Sized;
 
     let mut predicates = Vec::new();
     let all_generics =
@@ -1811,7 +1942,7 @@ fn push_const_arg_has_type_predicates<'db>(
     predicates: &mut Vec<Clause<'db>>,
     generics: &Generics,
 ) {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let const_params_offset = generics.len_parent() + generics.len_lifetimes_self();
     for (param_index, (param_idx, param_data)) in generics.iter_self_type_or_consts().enumerate() {
         if !matches!(param_data, TypeOrConstParamData::ConstParamData(_)) {
@@ -1839,13 +1970,13 @@ fn push_const_arg_has_type_predicates<'db>(
 /// Exception is Self of a trait def.
 fn implicitly_sized_clauses<'a, 'subst, 'db>(
     db: &'db dyn HirDatabase,
+    lang_items: &LangItems,
     def: GenericDefId,
     explicitly_unsized_tys: &'a FxHashSet<Ty<'db>>,
     args: &'subst GenericArgs<'db>,
-    resolver: &Resolver<'db>,
 ) -> Option<impl Iterator<Item = Clause<'db>> + Captures<'a> + Captures<'subst>> {
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
-    let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate())?;
+    let interner = DbInterner::new_no_crate(db);
+    let sized_trait = lang_items.Sized?;
 
     let trait_self_idx = trait_self_param_idx(db, def);
 
@@ -1992,7 +2123,7 @@ fn fn_sig_for_fn<'db>(
 ) -> EarlyBinder<'db, PolyFnSig<'db>> {
     let data = db.function_signature(def);
     let resolver = def.resolver(db);
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let interner = DbInterner::new_no_crate(db);
     let mut ctx_params = TyLoweringContext::new(
         db,
         &resolver,
@@ -2028,7 +2159,7 @@ fn fn_sig_for_fn<'db>(
 }
 
 fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let args = GenericArgs::identity_for_item(interner, adt.into());
     let ty = Ty::new_adt(interner, adt, args);
     EarlyBinder::bind(ty)
@@ -2043,7 +2174,7 @@ fn fn_sig_for_struct_constructor<'db>(
     let ret = type_for_adt(db, def.into()).skip_binder();
 
     let inputs_and_output =
-        Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret)));
+        Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret)));
     EarlyBinder::bind(Binder::dummy(FnSig {
         abi: FnAbi::RustCall,
         c_variadic: false,
@@ -2062,7 +2193,7 @@ fn fn_sig_for_enum_variant_constructor<'db>(
     let ret = type_for_adt(db, parent.into()).skip_binder();
 
     let inputs_and_output =
-        Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret)));
+        Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret)));
     EarlyBinder::bind(Binder::dummy(FnSig {
         abi: FnAbi::RustCall,
         c_variadic: false,
@@ -2078,7 +2209,7 @@ pub(crate) fn associated_ty_item_bounds<'db>(
 ) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> {
     let type_alias_data = db.type_alias_signature(type_alias);
     let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
-    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let interner = DbInterner::new_no_crate(db);
     let mut ctx = TyLoweringContext::new(
         db,
         &resolver,
@@ -2139,7 +2270,7 @@ pub(crate) fn associated_ty_item_bounds<'db>(
     }
 
     if !ctx.unsized_types.contains(&self_ty)
-        && let Some(sized_trait) = LangItem::Sized.resolve_trait(db, resolver.krate())
+        && let Some(sized_trait) = ctx.lang_items.Sized
     {
         let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new(
             interner,
@@ -2157,7 +2288,8 @@ pub(crate) fn associated_type_by_name_including_super_traits<'db>(
     trait_ref: TraitRef<'db>,
     name: &Name,
 ) -> Option<(TraitRef<'db>, TypeAliasId)> {
-    let interner = DbInterner::new_with(db, None, None);
+    let module = trait_ref.def_id.0.module(db);
+    let interner = DbInterner::new_with(db, module.krate());
     rustc_type_ir::elaborate::supertraits(interner, Binder::dummy(trait_ref)).find_map(|t| {
         let trait_id = t.as_ref().skip_binder().def_id.0;
         let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?;
@@ -2171,7 +2303,7 @@ pub fn associated_type_shorthand_candidates(
     res: TypeNs,
     mut cb: impl FnMut(&Name, TypeAliasId) -> bool,
 ) -> Option<TypeAliasId> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     named_associated_type_shorthand_candidates(interner, def, res, None, |name, _, id| {
         cb(name, id).then_some(id)
     })
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs
index 6d3ce74..fe96b68 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs
@@ -1100,7 +1100,7 @@ pub(crate) fn substs_from_args_and_bindings<'db>(
     explicit_self_ty: Option<Ty<'db>>,
     ctx: &mut impl GenericArgsLowerer<'db>,
 ) -> GenericArgs<'db> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
 
     tracing::debug!(?args_and_bindings);
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index 59299f2..9a6aded 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -15,6 +15,7 @@
 use hir_def::{
     AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId,
     ModuleId, TraitId,
+    attrs::AttrFlags,
     expr_store::path::GenericArgs as HirGenericArgs,
     hir::ExprId,
     nameres::{DefMap, block_def_map, crate_def_map},
@@ -80,7 +81,7 @@ pub struct MethodResolutionContext<'a, 'db> {
     pub unstable_features: &'a MethodResolutionUnstableFeatures,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
 pub enum CandidateId {
     FunctionId(FunctionId),
     ConstId(ConstId),
@@ -418,7 +419,7 @@ pub(crate) fn lookup_impl_method_query<'db>(
     func: FunctionId,
     fn_subst: GenericArgs<'db>,
 ) -> (FunctionId, GenericArgs<'db>) {
-    let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+    let interner = DbInterner::new_with(db, env.krate);
     let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
 
     let ItemContainerId::TraitId(trait_id) = func.loc(db).container else {
@@ -509,9 +510,8 @@ fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Cra
 pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] {
     let has_incoherent_impls = match self_ty.def() {
         Some(def_id) => match def_id.try_into() {
-            Ok(def_id) => {
-                db.attrs(def_id).by_key(sym::rustc_has_incoherent_inherent_impls).exists()
-            }
+            Ok(def_id) => AttrFlags::query(db, def_id)
+                .contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
             Err(()) => true,
         },
         _ => true,
@@ -597,7 +597,7 @@ fn collect(
                         continue;
                     }
 
-                    let interner = DbInterner::new_with(db, None, None);
+                    let interner = DbInterner::new_no_crate(db);
                     let self_ty = db.impl_self_ty(impl_id);
                     let self_ty = self_ty.instantiate_identity();
                     if let Some(self_ty) =
@@ -715,7 +715,9 @@ fn collect(
                     // FIXME: Reservation impls should be considered during coherence checks. If we are
                     // (ever) to implement coherence checks, this filtering should be done by the trait
                     // solver.
-                    if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
+                    if AttrFlags::query(db, impl_id.into())
+                        .contains(AttrFlags::RUSTC_RESERVATION_IMPL)
+                    {
                         continue;
                     }
                     let trait_ref = match db.impl_trait(impl_id) {
@@ -723,7 +725,7 @@ fn collect(
                         None => continue,
                     };
                     let self_ty = trait_ref.self_ty();
-                    let interner = DbInterner::new_with(db, None, None);
+                    let interner = DbInterner::new_no_crate(db);
                     let entry = map.entry(trait_ref.def_id.0).or_default();
                     match simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer) {
                         Some(self_ty) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs
index 9e8791e..570dd63 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs
@@ -5,7 +5,6 @@
     FunctionId, GenericDefId, GenericParamId, ItemContainerId, TraitId,
     expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs},
     hir::{ExprId, generics::GenericParamDataRef},
-    lang_item::LangItem,
 };
 use rustc_type_ir::{
     TypeFoldable,
@@ -481,7 +480,7 @@ fn unify_receivers(
             }
             Err(_) => {
                 if self.ctx.unstable_features.arbitrary_self_types {
-                    self.ctx.result.type_mismatches.insert(
+                    self.ctx.result.type_mismatches.get_or_insert_default().insert(
                         self.expr.into(),
                         TypeMismatch { expected: method_self_ty, actual: self_ty },
                     );
@@ -550,9 +549,7 @@ fn predicates_require_illegal_sized_bound(
         &self,
         predicates: impl Iterator<Item = Clause<'db>>,
     ) -> bool {
-        let Some(sized_def_id) =
-            LangItem::Sized.resolve_trait(self.db(), self.ctx.resolver.krate())
-        else {
+        let Some(sized_def_id) = self.ctx.lang_items.Sized else {
             return false;
         };
 
@@ -570,9 +567,7 @@ fn predicates_require_illegal_sized_bound(
     fn check_for_illegal_method_calls(&self) {
         // Disallow calls to the method `drop` defined in the `Drop` trait.
         if let ItemContainerId::TraitId(trait_def_id) = self.candidate.loc(self.db()).container
-            && LangItem::Drop
-                .resolve_trait(self.db(), self.ctx.resolver.krate())
-                .is_some_and(|drop_trait| drop_trait == trait_def_id)
+            && self.ctx.lang_items.Drop.is_some_and(|drop_trait| drop_trait == trait_def_id)
         {
             // FIXME: Report an error.
         }
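`check_for_illegal_method_calls` mirrors rustc's rule that `Drop::drop` may not be called explicitly (error E0040); dropping a value early goes through `std::mem::drop` instead. A minimal reproduction of the allowed and rejected forms:

```rust
struct Guard;

impl Drop for Guard {
    fn drop(&mut self) {
        println!("released");
    }
}

fn main() {
    let g = Guard;
    // g.drop(); // error[E0040]: explicit use of destructor method
    drop(g); // `std::mem::drop` runs the destructor here instead of at end of scope
}
```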
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index b5b691d..f47e3b7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -134,27 +134,27 @@ fn from_fn(
         func_id: hir_def::FunctionId,
         generic_args: GenericArgs<'db>,
     ) -> Operand<'db> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args);
         Operand::from_bytes(Box::default(), ty)
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ProjectionElem<V, T> {
+#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
+pub enum ProjectionElem<'db, V: PartialEq> {
     Deref,
     Field(Either<FieldId, TupleFieldId>),
     // FIXME: get rid of this, and use FieldId for tuples and closures
     ClosureField(usize),
-    Index(V),
+    Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V),
     ConstantIndex { offset: u64, from_end: bool },
     Subslice { from: u64, to: u64 },
     //Downcast(Option<Symbol>, VariantIdx),
-    OpaqueCast(T),
+    OpaqueCast(Ty<'db>),
 }
 
-impl<V, T> ProjectionElem<V, T> {
-    pub fn projected_ty<'db>(
+impl<'db, V: PartialEq> ProjectionElem<'db, V> {
+    pub fn projected_ty(
         &self,
         infcx: &InferCtxt<'db>,
         mut base: Ty<'db>,
@@ -254,7 +254,7 @@ pub fn projected_ty<'db>(
     }
 }
 
-type PlaceElem<'db> = ProjectionElem<LocalId<'db>, Ty<'db>>;
+type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>;
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct ProjectionId(u32);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index 0189265..2d2fc03 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -12,7 +12,7 @@
 use triomphe::Arc;
 
 use crate::{
-    TraitEnvironment,
+    InferenceResult, TraitEnvironment,
     db::{HirDatabase, InternedClosure, InternedClosureId},
     display::DisplayTarget,
     mir::OperandKind,
@@ -97,7 +97,7 @@ pub fn borrowck_query<'db>(
 ) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>> {
     let _p = tracing::info_span!("borrowck_query").entered();
     let module = def.module(db);
-    let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+    let interner = DbInterner::new_with(db, module.krate());
     let env = db.trait_environment_for_body(def);
     let mut res = vec![];
     // This calculates the opaques' defining scope, which is a bit costly, therefore it is done outside `all_mir_bodies()`.
@@ -121,10 +121,10 @@ fn make_fetch_closure_field<'db>(
 ) -> impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db> + use<'db> {
     |c: InternedClosureId, subst: GenericArgs<'db>, f: usize| {
         let InternedClosure(def, _) = db.lookup_intern_closure(c);
-        let infer = db.infer(def);
+        let infer = InferenceResult::for_body(db, def);
         let (captures, _) = infer.closure_info(c);
         let parent_subst = subst.split_closure_args_untupled().parent_args;
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         captures.get(f).expect("broken closure field").ty.instantiate(interner, parent_subst)
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index da15ca6..3418689 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -9,7 +9,7 @@
     Lookup, StaticId, VariantId,
     expr_store::HygieneId,
     item_tree::FieldsShape,
-    lang_item::LangItem,
+    lang_item::LangItems,
     layout::{TagEncoding, Variants},
     resolver::{HasResolver, TypeNs, ValueNs},
     signatures::{StaticFlags, StructFlags},
@@ -34,7 +34,7 @@
 use triomphe::Arc;
 
 use crate::{
-    CallableDefId, ComplexMemoryMap, MemoryMap, TraitEnvironment,
+    CallableDefId, ComplexMemoryMap, InferenceResult, MemoryMap, TraitEnvironment,
     consteval::{self, ConstEvalError, try_const_usize},
     db::{HirDatabase, InternedClosure, InternedClosureId},
     display::{ClosureStyle, DisplayTarget, HirDisplay},
@@ -641,8 +641,9 @@ pub fn new(
             Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
         };
         let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
-        let interner = DbInterner::new_with(db, Some(crate_id), module.containing_block());
+        let interner = DbInterner::new_with(db, crate_id);
         let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
+        let lang_items = interner.lang_items();
         Ok(Evaluator {
             target_data_layout,
             stack: vec![0],
@@ -667,13 +668,13 @@ pub fn new(
             mir_or_dyn_index_cache: RefCell::new(Default::default()),
             unused_locals_store: RefCell::new(Default::default()),
             cached_ptr_size,
-            cached_fn_trait_func: LangItem::Fn
-                .resolve_trait(db, crate_id)
+            cached_fn_trait_func: lang_items
+                .Fn
                 .and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),
-            cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| {
+            cached_fn_mut_trait_func: lang_items.FnMut.and_then(|x| {
                 x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))
             }),
-            cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| {
+            cached_fn_once_trait_func: lang_items.FnOnce.and_then(|x| {
                 x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))
             }),
             infcx,
@@ -685,6 +686,11 @@ fn interner(&self) -> DbInterner<'db> {
         self.infcx.interner
     }
 
+    #[inline]
+    fn lang_items(&self) -> &'db LangItems {
+        self.infcx.interner.lang_items()
+    }
+
     fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> {
         Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
     }
@@ -716,7 +722,7 @@ fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem<'db>) -> Ty<'db> {
             ty,
             |c, subst, f| {
                 let InternedClosure(def, _) = self.db.lookup_intern_closure(c);
-                let infer = self.db.infer(def);
+                let infer = InferenceResult::for_body(self.db, def);
                 let (captures, _) = infer.closure_info(c);
                 let parent_subst = subst.split_closure_args_untupled().parent_args;
                 captures
@@ -877,7 +883,8 @@ fn operand_ty(&self, o: &Operand<'db>, locals: &Locals<'db>) -> Result<'db, Ty<'
             OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
             OperandKind::Constant { konst: _, ty } => *ty,
             &OperandKind::Static(s) => {
-                let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr];
+                let ty =
+                    InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr];
                 Ty::new_ref(
                     self.interner(),
                     Region::new_static(self.interner()),
@@ -2803,7 +2810,8 @@ fn eval_static(&mut self, st: StaticId, locals: &Locals<'db>) -> Result<'db, Add
             })?;
             self.allocate_const_in_heap(locals, konst)?
         } else {
-            let ty = self.db.infer(st.into())[self.db.body(st.into()).body_expr];
+            let ty =
+                InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr];
             let Some((size, align)) = self.size_align_of(ty, locals)? else {
                 not_supported!("unsized extern static");
             };
@@ -2864,7 +2872,7 @@ fn run_drop_glue_deep(
         span: MirSpan,
     ) -> Result<'db, ()> {
         let Some(drop_fn) = (|| {
-            let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?;
+            let drop_trait = self.lang_items().Drop?;
             drop_trait.trait_items(self.db).method_by_name(&Name::new_symbol_root(sym::drop))
         })() else {
             // in some tests we don't have drop trait in minicore, and
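
The hunks above replace per-call `LangItem::X.resolve_trait(db, krate)` lookups with a per-crate `LangItems` table of pre-resolved `Option` fields, reached through `self.lang_items()`. A minimal self-contained sketch of that lookup shape (the `TraitId` type and the `drop_method_exists` helper are hypothetical stand-ins, not rust-analyzer's actual types):

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TraitId(u32);

// Hypothetical stand-in for a table with one pre-resolved `Option` field per lang item.
#[derive(Default)]
#[allow(non_snake_case, dead_code)]
struct LangItems {
    Drop: Option<TraitId>,
    Fn: Option<TraitId>,
    FnMut: Option<TraitId>,
    FnOnce: Option<TraitId>,
}

// Mirrors the `self.lang_items().Drop?` pattern: a missing lang item simply
// short-circuits to "not found" instead of being re-resolved on every call.
fn drop_method_exists(lang_items: &LangItems, has_drop_method: impl Fn(TraitId) -> bool) -> bool {
    lang_items.Drop.is_some_and(has_drop_method)
}

fn main() {
    let items = LangItems { Drop: Some(TraitId(7)), ..LangItems::default() };
    assert!(drop_method_exists(&items, |t| t == TraitId(7)));
    assert!(!drop_method_exists(&LangItems::default(), |_| true));
}
```
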
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index 4b1adec..591c12e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -3,19 +3,20 @@
 //!
 use std::cmp::{self, Ordering};
 
-use hir_def::{CrateRootModuleId, resolver::HasResolver, signatures::FunctionSignature};
+use hir_def::{attrs::AttrFlags, signatures::FunctionSignature};
 use hir_expand::name::Name;
-use intern::{Symbol, sym};
+use intern::sym;
 use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
 use stdx::never;
 
 use crate::{
+    InferenceResult,
     display::DisplayTarget,
     drop::{DropGlue, has_drop_glue},
     mir::eval::{
         Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay,
-        InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, LangItem,
-        Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
+        InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, Layout, Locals,
+        Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
     },
     next_solver::Region,
 };
@@ -38,6 +39,13 @@ macro_rules! not_supported {
     };
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum EvalLangItem {
+    BeginPanic,
+    SliceLen,
+    DropInPlace,
+}
+
 impl<'db> Evaluator<'db> {
     pub(super) fn detect_and_exec_special_function(
         &mut self,
@@ -53,7 +61,7 @@ pub(super) fn detect_and_exec_special_function(
         }
 
         let function_data = self.db.function_signature(def);
-        let attrs = self.db.attrs(def.into());
+        let attrs = AttrFlags::query(self.db, def.into());
         let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);
 
         if is_intrinsic {
@@ -65,7 +73,7 @@ pub(super) fn detect_and_exec_special_function(
                 locals,
                 span,
                 !function_data.has_body()
-                    || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(),
+                    || attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN),
             );
         }
         let is_extern_c = match def.lookup(self.db).container {
@@ -85,18 +93,13 @@ pub(super) fn detect_and_exec_special_function(
                 .map(|()| true);
         }
 
-        let alloc_fn =
-            attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| {
-                [
-                    &sym::rustc_allocator,
-                    &sym::rustc_deallocator,
-                    &sym::rustc_reallocator,
-                    &sym::rustc_allocator_zeroed,
-                ]
-                .contains(it)
-            });
-        if let Some(alloc_fn) = alloc_fn {
-            self.exec_alloc_fn(alloc_fn, args, destination)?;
+        if attrs.intersects(
+            AttrFlags::RUSTC_ALLOCATOR
+                | AttrFlags::RUSTC_DEALLOCATOR
+                | AttrFlags::RUSTC_REALLOCATOR
+                | AttrFlags::RUSTC_ALLOCATOR_ZEROED,
+        ) {
+            self.exec_alloc_fn(attrs, args, destination)?;
             return Ok(true);
         }
         if let Some(it) = self.detect_lang_function(def) {
@@ -105,7 +108,7 @@ pub(super) fn detect_and_exec_special_function(
             return Ok(true);
         }
         if let ItemContainerId::TraitId(t) = def.lookup(self.db).container
-            && self.db.lang_attr(t.into()) == Some(LangItem::Clone)
+            && Some(t) == self.lang_items().Clone
         {
             let [self_ty] = generic_args.as_slice() else {
                 not_supported!("wrong generic arg count for clone");
@@ -131,12 +134,8 @@ pub(super) fn detect_and_redirect_special_function(
         def: FunctionId,
     ) -> Result<'db, Option<FunctionId>> {
         // `PanicFmt` is redirected to `ConstPanicFmt`
-        if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
-            let resolver = CrateRootModuleId::from(self.crate_id).resolver(self.db);
-
-            let Some(const_panic_fmt) =
-                LangItem::ConstPanicFmt.resolve_function(self.db, resolver.krate())
-            else {
+        if Some(def) == self.lang_items().PanicFmt {
+            let Some(const_panic_fmt) = self.lang_items().ConstPanicFmt else {
                 not_supported!("const_panic_fmt lang item not found or not a function");
             };
             return Ok(Some(const_panic_fmt));
@@ -169,7 +168,7 @@ fn exec_clone(
                 };
                 let addr = Address::from_bytes(arg.get(self)?)?;
                 let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure(id.0);
-                let infer = self.db.infer(closure_owner);
+                let infer = InferenceResult::for_body(self.db, closure_owner);
                 let (captures, _) = infer.closure_info(id.0);
                 let layout = self.layout(self_ty)?;
                 let db = self.db;
@@ -245,12 +244,14 @@ fn exec_clone_for_fields(
 
     fn exec_alloc_fn(
         &mut self,
-        alloc_fn: &Symbol,
+        alloc_fn: AttrFlags,
         args: &[IntervalAndTy<'db>],
         destination: Interval,
     ) -> Result<'db, ()> {
         match alloc_fn {
-            _ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => {
+            _ if alloc_fn
+                .intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) =>
+            {
                 let [size, align] = args else {
                     return Err(MirEvalError::InternalError(
                         "rustc_allocator args are not provided".into(),
@@ -261,8 +262,8 @@ fn exec_alloc_fn(
                 let result = self.heap_allocate(size, align)?;
                 destination.write_from_bytes(self, &result.to_bytes())?;
             }
-            _ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ }
-            _ if *alloc_fn == sym::rustc_reallocator => {
+            _ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ }
+            _ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => {
                 let [ptr, old_size, align, new_size] = args else {
                     return Err(MirEvalError::InternalError(
                         "rustc_allocator args are not provided".into(),
@@ -286,19 +287,26 @@ fn exec_alloc_fn(
         Ok(())
     }
 
-    fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
-        use LangItem::*;
-        let attrs = self.db.attrs(def.into());
+    fn detect_lang_function(&self, def: FunctionId) -> Option<EvalLangItem> {
+        use EvalLangItem::*;
+        let lang_items = self.lang_items();
+        let attrs = AttrFlags::query(self.db, def.into());
 
-        if attrs.by_key(sym::rustc_const_panic_str).exists() {
+        if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) {
             // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE.
-            return Some(LangItem::BeginPanic);
+            return Some(BeginPanic);
         }
 
-        let candidate = attrs.lang_item()?;
         // We want to execute these functions with special logic
         // `PanicFmt` is not detected here as it's redirected later.
-        if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
+        if let Some((_, candidate)) = [
+            (lang_items.BeginPanic, BeginPanic),
+            (lang_items.SliceLen, SliceLen),
+            (lang_items.DropInPlace, DropInPlace),
+        ]
+        .into_iter()
+        .find(|&(candidate, _)| candidate == Some(def))
+        {
             return Some(candidate);
         }
 
@@ -307,13 +315,13 @@ fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
 
     fn exec_lang_item(
         &mut self,
-        it: LangItem,
+        it: EvalLangItem,
         generic_args: GenericArgs<'db>,
         args: &[IntervalAndTy<'db>],
         locals: &Locals<'db>,
         span: MirSpan,
     ) -> Result<'db, Vec<u8>> {
-        use LangItem::*;
+        use EvalLangItem::*;
         let mut args = args.iter();
         match it {
             BeginPanic => {
@@ -374,7 +382,6 @@ fn exec_lang_item(
                 )?;
                 Ok(vec![])
             }
-            it => not_supported!("Executing lang item {it:?}"),
         }
     }
 
@@ -1219,7 +1226,7 @@ fn exec_intrinsic(
                     let addr = tuple.interval.addr.offset(offset);
                     args.push(IntervalAndTy::new(addr, field, self, locals)?);
                 }
-                if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id)
+                if let Some(target) = self.lang_items().FnOnce
                     && let Some(def) = target
                         .trait_items(self.db)
                         .method_by_name(&Name::new_symbol_root(sym::call_once))
@@ -1329,7 +1336,7 @@ fn exec_intrinsic(
                 {
                     result = (l as i8).cmp(&(r as i8));
                 }
-                if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) {
+                if let Some(e) = self.lang_items().Ordering {
                     let ty = self.db.ty(e.into()).skip_binder();
                     let r = self.compute_discriminant(ty, &[result as i8 as u8])?;
                     destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?;
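
The shim changes above fold the allocator-related `rustc_*` attributes into `AttrFlags` bit tests (`intersects`/`contains`) instead of comparing interned symbols one by one. A minimal sketch of that flag shape, with hypothetical constants rather than the real `AttrFlags` definition:

```rust
#[derive(Clone, Copy, PartialEq, Eq)]
struct AttrFlags(u32);

impl AttrFlags {
    const RUSTC_ALLOCATOR: AttrFlags = AttrFlags(1 << 0);
    const RUSTC_DEALLOCATOR: AttrFlags = AttrFlags(1 << 1);
    const RUSTC_REALLOCATOR: AttrFlags = AttrFlags(1 << 2);
    const RUSTC_ALLOCATOR_ZEROED: AttrFlags = AttrFlags(1 << 3);

    // `contains` requires all bits of `other`; `intersects` requires any of them.
    fn contains(self, other: AttrFlags) -> bool {
        self.0 & other.0 == other.0
    }
    fn intersects(self, other: AttrFlags) -> bool {
        self.0 & other.0 != 0
    }
}

impl std::ops::BitOr for AttrFlags {
    type Output = AttrFlags;
    fn bitor(self, rhs: AttrFlags) -> AttrFlags {
        AttrFlags(self.0 | rhs.0)
    }
}

fn is_alloc_shim(attrs: AttrFlags) -> bool {
    attrs.intersects(
        AttrFlags::RUSTC_ALLOCATOR
            | AttrFlags::RUSTC_DEALLOCATOR
            | AttrFlags::RUSTC_REALLOCATOR
            | AttrFlags::RUSTC_ALLOCATOR_ZEROED,
    )
}

fn main() {
    assert!(is_alloc_shim(AttrFlags::RUSTC_REALLOCATOR));
    assert!(!is_alloc_shim(AttrFlags(0)));
    assert!(AttrFlags::RUSTC_ALLOCATOR.contains(AttrFlags::RUSTC_ALLOCATOR));
}
```
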
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
index 88acd490..bb2afb2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
@@ -17,7 +17,7 @@
 
 fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> {
     crate::attach_db(db, || {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let module_id = db.module_for_file(file_id.file_id(db));
         let def_map = module_id.def_map(db);
         let scope = &def_map[module_id.local_id].scope;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index 7f457ca..190b2f9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -12,7 +12,7 @@
         Pat, PatId, RecordFieldPat, RecordLitField,
     },
     item_tree::FieldsShape,
-    lang_item::{LangItem, LangItemTarget, lang_item},
+    lang_item::LangItems,
     resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
 };
 use hir_expand::name::Name;
@@ -110,7 +110,7 @@ pub enum MirLowerError<'db> {
     Loop,
     /// Something that should never happen and is definitely a bug, but we don't want to panic if it happened
     ImplementationError(String),
-    LangItemNotFound(LangItem),
+    LangItemNotFound,
     MutatingRvalue,
     UnresolvedLabel,
     UnresolvedUpvar(Place<'db>),
@@ -232,7 +232,7 @@ pub fn pretty_print(
             | MirLowerError::BreakWithoutLoop
             | MirLowerError::Loop
             | MirLowerError::ImplementationError(_)
-            | MirLowerError::LangItemNotFound(_)
+            | MirLowerError::LangItemNotFound
             | MirLowerError::MutatingRvalue
             | MirLowerError::UnresolvedLabel
             | MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{self:?}")?,
@@ -302,7 +302,7 @@ fn new(
         };
         let resolver = owner.resolver(db);
         let env = db.trait_environment_for_body(owner);
-        let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+        let interner = DbInterner::new_with(db, env.krate);
         // FIXME(next-solver): Is `non_body_analysis()` correct here? Don't we want to reveal opaque types defined by this body?
         let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
 
@@ -327,6 +327,11 @@ fn interner(&self) -> DbInterner<'db> {
         self.infcx.interner
     }
 
+    #[inline]
+    fn lang_items(&self) -> &'db LangItems {
+        self.infcx.interner.lang_items()
+    }
+
     fn temp(
         &mut self,
         ty: Ty<'db>,
@@ -1816,11 +1821,6 @@ fn push_storage_live_for_local(
         Ok(())
     }
 
-    fn resolve_lang_item(&self, item: LangItem) -> Result<'db, LangItemTarget> {
-        let crate_id = self.owner.module(self.db).krate();
-        lang_item(self.db, crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
-    }
-
     fn lower_block_to_place(
         &mut self,
         statements: &[hir_def::hir::Statement],
@@ -2111,7 +2111,7 @@ pub fn mir_body_for_closure_query<'db>(
 ) -> Result<'db, Arc<MirBody<'db>>> {
     let InternedClosure(owner, expr) = db.lookup_intern_closure(closure);
     let body = db.body(owner);
-    let infer = db.infer(owner);
+    let infer = InferenceResult::for_body(db, owner);
     let Expr::Closure { args, body: root, .. } = &body[expr] else {
         implementation_error!("closure expression is not closure");
     };
@@ -2119,7 +2119,7 @@ pub fn mir_body_for_closure_query<'db>(
         implementation_error!("closure expression is not closure");
     };
     let (captures, kind) = infer.closure_info(closure);
-    let mut ctx = MirLowerCtx::new(db, owner, &body, &infer);
+    let mut ctx = MirLowerCtx::new(db, owner, &body, infer);
     // 0 is return local
     ctx.result.locals.alloc(Local { ty: infer[*root] });
     let closure_local = ctx.result.locals.alloc(Local {
@@ -2249,8 +2249,8 @@ pub fn mir_body_query<'db>(
     };
     let _p = tracing::info_span!("mir_body_query", ?detail).entered();
     let body = db.body(def);
-    let infer = db.infer(def);
-    let mut result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
+    let infer = InferenceResult::for_body(db, def);
+    let mut result = lower_to_mir(db, def, &body, infer, body.body_expr)?;
     result.shrink_to_fit();
     Ok(Arc::new(result))
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
index bceafae..40c6c5d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
@@ -2,7 +2,7 @@
 
 use hir_def::FunctionId;
 use intern::sym;
-use rustc_type_ir::inherent::{AdtDef, Region as _, Ty as _};
+use rustc_type_ir::inherent::{Region as _, Ty as _};
 
 use super::*;
 use crate::{
@@ -177,13 +177,7 @@ pub(super) fn lower_expr_as_place_without_adjust(
             Expr::UnaryOp { expr, op: hir_def::hir::UnaryOp::Deref } => {
                 let is_builtin = match self.expr_ty_without_adjust(*expr).kind() {
                     TyKind::Ref(..) | TyKind::RawPtr(..) => true,
-                    TyKind::Adt(id, _) => {
-                        if let Some(lang_item) = self.db.lang_attr(id.def_id().0.into()) {
-                            lang_item == LangItem::OwnedBox
-                        } else {
-                            false
-                        }
-                    }
+                    TyKind::Adt(id, _) => id.is_box(),
                     _ => false,
                 };
                 if !is_builtin {
@@ -198,8 +192,7 @@ pub(super) fn lower_expr_as_place_without_adjust(
                         expr_id.into(),
                         'b: {
                             if let Some((f, _)) = self.infer.method_resolution(expr_id)
-                                && let Some(deref_trait) =
-                                    self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
+                                && let Some(deref_trait) = self.lang_items().DerefMut
                                 && let Some(deref_fn) = deref_trait
                                     .trait_items(self.db)
                                     .method_by_name(&Name::new_symbol_root(sym::deref_mut))
@@ -330,17 +323,18 @@ fn lower_overloaded_deref(
         span: MirSpan,
         mutability: bool,
     ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
+        let lang_items = self.lang_items();
         let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
             (
                 Mutability::Not,
-                LangItem::Deref,
+                lang_items.Deref,
                 Name::new_symbol_root(sym::deref),
                 BorrowKind::Shared,
             )
         } else {
             (
                 Mutability::Mut,
-                LangItem::DerefMut,
+                lang_items.DerefMut,
                 Name::new_symbol_root(sym::deref_mut),
                 BorrowKind::Mut { kind: MutBorrowKind::Default },
             )
@@ -350,14 +344,11 @@ fn lower_overloaded_deref(
         let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability);
         let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into();
         self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
-        let deref_trait = self
-            .resolve_lang_item(trait_lang_item)?
-            .as_trait()
-            .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
+        let deref_trait = trait_lang_item.ok_or(MirLowerError::LangItemNotFound)?;
         let deref_fn = deref_trait
             .trait_items(self.db)
             .method_by_name(&trait_method_name)
-            .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
+            .ok_or(MirLowerError::LangItemNotFound)?;
         let deref_fn_op = Operand::const_zst(Ty::new_fn_def(
             self.interner(),
             CallableDefId::FunctionId(deref_fn).into(),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index 745f739..1f73d5c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -98,7 +98,7 @@ fn new(
         env: Arc<TraitEnvironment<'db>>,
         subst: GenericArgs<'db>,
     ) -> Self {
-        let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+        let interner = DbInterner::new_with(db, env.krate);
         let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
         Self { infcx, trait_env: env, subst }
     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs
index 2205cba..10f2ba2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs
@@ -7,6 +7,7 @@
     GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance,
     inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _},
     relate::{Relate, VarianceDiagInfo},
+    walk::TypeWalker,
 };
 use smallvec::SmallVec;
 
@@ -78,6 +79,11 @@ pub fn error_from_id(interner: DbInterner<'db>, id: GenericParamId) -> GenericAr
             GenericParamId::LifetimeParamId(_) => Region::error(interner).into(),
         }
     }
+
+    #[inline]
+    pub fn walk(self) -> TypeWalker<DbInterner<'db>> {
+        TypeWalker::new(self)
+    }
 }
 
 impl<'db> From<Term<'db>> for GenericArg<'db> {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs
index fcce04f..4431914 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs
@@ -8,7 +8,6 @@
 pub use BoundRegionConversionTime::*;
 use ena::unify as ut;
 use hir_def::GenericParamId;
-use hir_def::lang_item::LangItem;
 use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage};
 use region_constraints::{RegionConstraintCollector, RegionConstraintStorage};
 use rustc_next_trait_solver::solve::SolverDelegateEvalExt;
@@ -542,9 +541,7 @@ pub fn goal_may_hold_opaque_types_jank(&self, goal: Goal<'db, Predicate<'db>>) -
     pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool {
         let ty = self.resolve_vars_if_possible(ty);
 
-        let Some(copy_def_id) =
-            LangItem::Copy.resolve_trait(self.interner.db, self.interner.krate.unwrap())
-        else {
+        let Some(copy_def_id) = self.interner.lang_items().Copy else {
             return false;
         };
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index a3c984f..2e52dce 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -8,9 +8,10 @@
 
 use base_db::Crate;
 use hir_def::{
-    AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule,
-    ItemContainerId, StructId, UnionId, VariantId,
-    lang_item::LangItem,
+    AdtId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, StructId,
+    UnionId, VariantId,
+    attrs::AttrFlags,
+    lang_item::LangItems,
     signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
 };
 use la_arena::Idx;
@@ -270,8 +271,8 @@ fn default() -> Self {
 #[derive(Debug, Copy, Clone)]
 pub struct DbInterner<'db> {
     pub(crate) db: &'db dyn HirDatabase,
-    pub(crate) krate: Option<Crate>,
-    pub(crate) block: Option<BlockId>,
+    krate: Option<Crate>,
+    lang_items: Option<&'db LangItems>,
 }
 
 // FIXME: very wrong, see https://github.com/rust-lang/rust/pull/144808
@@ -284,22 +285,42 @@ pub fn conjure() -> DbInterner<'db> {
         crate::with_attached_db(|db| DbInterner {
             db: unsafe { std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(db) },
             krate: None,
-            block: None,
+            lang_items: None,
         })
     }
 
-    pub fn new_with(
-        db: &'db dyn HirDatabase,
-        krate: Option<Crate>,
-        block: Option<BlockId>,
-    ) -> DbInterner<'db> {
-        DbInterner { db, krate, block }
+    /// Creates a new interner without an active crate. Good only for interning things, not for trait solving etc.
+    /// As a rule of thumb, when you create an `InferCtxt`, you need to provide the crate (and the block).
+    ///
+    /// Elaboration is a special case: it needs lang items (for `Sized`), therefore it needs `new_with()`.
+    pub fn new_no_crate(db: &'db dyn HirDatabase) -> Self {
+        DbInterner { db, krate: None, lang_items: None }
+    }
+
+    pub fn new_with(db: &'db dyn HirDatabase, krate: Crate) -> DbInterner<'db> {
+        DbInterner {
+            db,
+            krate: Some(krate),
+            // As an approximation, when we call `new_with` we're trait solving, therefore we need the lang items.
+            // This is also convenient since here we have a starting crate, unlike in `new_no_crate`.
+            lang_items: Some(hir_def::lang_item::lang_items(db, krate)),
+        }
     }
 
     #[inline]
     pub fn db(&self) -> &'db dyn HirDatabase {
         self.db
     }
+
+    #[inline]
+    #[track_caller]
+    pub fn lang_items(&self) -> &'db LangItems {
+        self.lang_items.expect(
+            "Must have `DbInterner::lang_items`.\n\n\
+            Note: you might have called `DbInterner::new_no_crate()` \
+            where you should've called `DbInterner::new_with()`",
+        )
+    }
 }
 
 // This is intentionally left as `()`
@@ -479,28 +500,28 @@ pub fn new<'db>(def_id: AdtId, interner: DbInterner<'db>) -> Self {
 
                 let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))];
 
-                let mut repr = ReprOptions::default();
-                repr.align = data.repr.and_then(|r| r.align);
-                repr.pack = data.repr.and_then(|r| r.pack);
-                repr.int = data.repr.and_then(|r| r.int);
-
+                let data_repr = data.repr(db, struct_id);
                 let mut repr_flags = ReprFlags::empty();
                 if flags.is_box {
                     repr_flags.insert(ReprFlags::IS_LINEAR);
                 }
-                if data.repr.is_some_and(|r| r.c()) {
+                if data_repr.is_some_and(|r| r.c()) {
                     repr_flags.insert(ReprFlags::IS_C);
                 }
-                if data.repr.is_some_and(|r| r.simd()) {
+                if data_repr.is_some_and(|r| r.simd()) {
                     repr_flags.insert(ReprFlags::IS_SIMD);
                 }
-                repr.flags = repr_flags;
+                let repr = ReprOptions {
+                    align: data_repr.and_then(|r| r.align),
+                    pack: data_repr.and_then(|r| r.pack),
+                    int: data_repr.and_then(|r| r.int),
+                    flags: repr_flags,
+                    ..ReprOptions::default()
+                };
 
                 (flags, variants, repr)
             }
             AdtId::UnionId(union_id) => {
-                let data = db.union_signature(union_id);
-
                 let flags = AdtFlags {
                     is_enum: false,
                     is_union: true,
@@ -513,22 +534,24 @@ pub fn new<'db>(def_id: AdtId, interner: DbInterner<'db>) -> Self {
 
                 let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))];
 
-                let mut repr = ReprOptions::default();
-                repr.align = data.repr.and_then(|r| r.align);
-                repr.pack = data.repr.and_then(|r| r.pack);
-                repr.int = data.repr.and_then(|r| r.int);
-
+                let data_repr = AttrFlags::repr(db, union_id.into());
                 let mut repr_flags = ReprFlags::empty();
                 if flags.is_box {
                     repr_flags.insert(ReprFlags::IS_LINEAR);
                 }
-                if data.repr.is_some_and(|r| r.c()) {
+                if data_repr.is_some_and(|r| r.c()) {
                     repr_flags.insert(ReprFlags::IS_C);
                 }
-                if data.repr.is_some_and(|r| r.simd()) {
+                if data_repr.is_some_and(|r| r.simd()) {
                     repr_flags.insert(ReprFlags::IS_SIMD);
                 }
-                repr.flags = repr_flags;
+                let repr = ReprOptions {
+                    align: data_repr.and_then(|r| r.align),
+                    pack: data_repr.and_then(|r| r.pack),
+                    int: data_repr.and_then(|r| r.int),
+                    flags: repr_flags,
+                    ..ReprOptions::default()
+                };
 
                 (flags, variants, repr)
             }
@@ -552,24 +575,26 @@ pub fn new<'db>(def_id: AdtId, interner: DbInterner<'db>) -> Self {
                     .map(|(idx, v)| (idx, VariantDef::Enum(v.0)))
                     .collect();
 
-                let data = db.enum_signature(enum_id);
-
-                let mut repr = ReprOptions::default();
-                repr.align = data.repr.and_then(|r| r.align);
-                repr.pack = data.repr.and_then(|r| r.pack);
-                repr.int = data.repr.and_then(|r| r.int);
+                let data_repr = AttrFlags::repr(db, enum_id.into());
 
                 let mut repr_flags = ReprFlags::empty();
                 if flags.is_box {
                     repr_flags.insert(ReprFlags::IS_LINEAR);
                 }
-                if data.repr.is_some_and(|r| r.c()) {
+                if data_repr.is_some_and(|r| r.c()) {
                     repr_flags.insert(ReprFlags::IS_C);
                 }
-                if data.repr.is_some_and(|r| r.simd()) {
+                if data_repr.is_some_and(|r| r.simd()) {
                     repr_flags.insert(ReprFlags::IS_SIMD);
                 }
-                repr.flags = repr_flags;
+
+                let repr = ReprOptions {
+                    align: data_repr.and_then(|r| r.align),
+                    pack: data_repr.and_then(|r| r.pack),
+                    int: data_repr.and_then(|r| r.int),
+                    flags: repr_flags,
+                    ..ReprOptions::default()
+                };
 
                 (flags, variants, repr)
             }
@@ -849,7 +874,7 @@ fn relate<R: rustc_type_ir::relate::TypeRelation<DbInterner<'db>>>(
 
 macro_rules! as_lang_item {
     (
-        $solver_enum:ident, $var:ident;
+        $solver_enum:ident, $self:ident, $def_id:expr;
 
         ignore = {
             $( $ignore:ident ),* $(,)?
@@ -857,6 +882,7 @@ macro_rules! as_lang_item {
 
         $( $variant:ident ),* $(,)?
     ) => {{
+        let lang_items = $self.lang_items();
         // Ensure exhaustiveness.
         if let Some(it) = None::<$solver_enum> {
             match it {
@@ -864,13 +890,32 @@ macro_rules! as_lang_item {
                 $( $solver_enum::$ignore => {} )*
             }
         }
-        match $var {
-            $( LangItem::$variant => Some($solver_enum::$variant), )*
+        match $def_id {
+            $( def_id if lang_items.$variant.is_some_and(|it| it == def_id) => Some($solver_enum::$variant), )*
             _ => None
         }
     }};
 }
 
+macro_rules! is_lang_item {
+    (
+        $solver_enum:ident, $self:ident, $def_id:expr, $expected_variant:ident;
+
+        ignore = {
+            $( $ignore:ident ),* $(,)?
+        }
+
+        $( $variant:ident ),* $(,)?
+    ) => {{
+        let lang_items = $self.lang_items();
+        let def_id = $def_id;
+        match $expected_variant {
+            $( $solver_enum::$variant => lang_items.$variant.is_some_and(|it| it == def_id), )*
+            $( $solver_enum::$ignore => false, )*
+        }
+    }};
+}
+
 impl<'db> Interner for DbInterner<'db> {
     type DefId = SolverDefId;
     type LocalDefId = SolverDefId;
@@ -1253,8 +1298,7 @@ fn coroutine_for_closure(self, def_id: Self::CoroutineClosureId) -> Self::Corout
     }
 
     fn generics_require_sized_self(self, def_id: Self::DefId) -> bool {
-        let sized_trait =
-            LangItem::Sized.resolve_trait(self.db(), self.krate.expect("Must have self.krate"));
+        let sized_trait = self.lang_items().Sized;
         let Some(sized_id) = sized_trait else {
             return false; /* No Sized trait, can't require it! */
         };
@@ -1428,84 +1472,69 @@ fn has_target_features(self, _def_id: Self::FunctionId) -> bool {
     }
 
     fn require_lang_item(self, lang_item: SolverLangItem) -> Self::DefId {
+        let lang_items = self.lang_items();
         let lang_item = match lang_item {
             SolverLangItem::AsyncFnKindUpvars => unimplemented!(),
-            SolverLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
-            SolverLangItem::CallOnceFuture => LangItem::CallOnceFuture,
-            SolverLangItem::CallRefFuture => LangItem::CallRefFuture,
-            SolverLangItem::CoroutineReturn => LangItem::CoroutineReturn,
-            SolverLangItem::CoroutineYield => LangItem::CoroutineYield,
-            SolverLangItem::DynMetadata => LangItem::DynMetadata,
-            SolverLangItem::FutureOutput => LangItem::FutureOutput,
-            SolverLangItem::Metadata => LangItem::Metadata,
+            SolverLangItem::AsyncFnOnceOutput => lang_items.AsyncFnOnceOutput,
+            SolverLangItem::CallOnceFuture => lang_items.CallOnceFuture,
+            SolverLangItem::CallRefFuture => lang_items.CallRefFuture,
+            SolverLangItem::CoroutineReturn => lang_items.CoroutineReturn,
+            SolverLangItem::CoroutineYield => lang_items.CoroutineYield,
+            SolverLangItem::FutureOutput => lang_items.FutureOutput,
+            SolverLangItem::Metadata => lang_items.Metadata,
+            SolverLangItem::DynMetadata => {
+                return lang_items.DynMetadata.expect("Lang item required but not found.").into();
+            }
         };
-        let target = hir_def::lang_item::lang_item(
-            self.db(),
-            self.krate.expect("Must have self.krate"),
-            lang_item,
-        )
-        .unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found."));
-        match target {
-            hir_def::lang_item::LangItemTarget::EnumId(enum_id) => enum_id.into(),
-            hir_def::lang_item::LangItemTarget::Function(function_id) => function_id.into(),
-            hir_def::lang_item::LangItemTarget::ImplDef(impl_id) => impl_id.into(),
-            hir_def::lang_item::LangItemTarget::Static(static_id) => static_id.into(),
-            hir_def::lang_item::LangItemTarget::Struct(struct_id) => struct_id.into(),
-            hir_def::lang_item::LangItemTarget::Union(union_id) => union_id.into(),
-            hir_def::lang_item::LangItemTarget::TypeAlias(type_alias_id) => type_alias_id.into(),
-            hir_def::lang_item::LangItemTarget::Trait(trait_id) => trait_id.into(),
-            hir_def::lang_item::LangItemTarget::EnumVariant(_) => unimplemented!(),
-        }
+        lang_item.expect("Lang item required but not found.").into()
     }
 
     fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> TraitIdWrapper {
+        let lang_items = self.lang_items();
         let lang_item = match lang_item {
-            SolverTraitLangItem::AsyncFn => LangItem::AsyncFn,
+            SolverTraitLangItem::AsyncFn => lang_items.AsyncFn,
             SolverTraitLangItem::AsyncFnKindHelper => unimplemented!(),
-            SolverTraitLangItem::AsyncFnMut => LangItem::AsyncFnMut,
-            SolverTraitLangItem::AsyncFnOnce => LangItem::AsyncFnOnce,
-            SolverTraitLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
+            SolverTraitLangItem::AsyncFnMut => lang_items.AsyncFnMut,
+            SolverTraitLangItem::AsyncFnOnce => lang_items.AsyncFnOnce,
+            SolverTraitLangItem::AsyncFnOnceOutput => unimplemented!(
+                "This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver."
+            ),
             SolverTraitLangItem::AsyncIterator => unimplemented!(),
-            SolverTraitLangItem::Clone => LangItem::Clone,
-            SolverTraitLangItem::Copy => LangItem::Copy,
-            SolverTraitLangItem::Coroutine => LangItem::Coroutine,
-            SolverTraitLangItem::Destruct => LangItem::Destruct,
-            SolverTraitLangItem::DiscriminantKind => LangItem::DiscriminantKind,
-            SolverTraitLangItem::Drop => LangItem::Drop,
-            SolverTraitLangItem::Fn => LangItem::Fn,
-            SolverTraitLangItem::FnMut => LangItem::FnMut,
-            SolverTraitLangItem::FnOnce => LangItem::FnOnce,
-            SolverTraitLangItem::FnPtrTrait => LangItem::FnPtrTrait,
+            SolverTraitLangItem::Clone => lang_items.Clone,
+            SolverTraitLangItem::Copy => lang_items.Copy,
+            SolverTraitLangItem::Coroutine => lang_items.Coroutine,
+            SolverTraitLangItem::Destruct => lang_items.Destruct,
+            SolverTraitLangItem::DiscriminantKind => lang_items.DiscriminantKind,
+            SolverTraitLangItem::Drop => lang_items.Drop,
+            SolverTraitLangItem::Fn => lang_items.Fn,
+            SolverTraitLangItem::FnMut => lang_items.FnMut,
+            SolverTraitLangItem::FnOnce => lang_items.FnOnce,
+            SolverTraitLangItem::FnPtrTrait => lang_items.FnPtrTrait,
             SolverTraitLangItem::FusedIterator => unimplemented!(),
-            SolverTraitLangItem::Future => LangItem::Future,
-            SolverTraitLangItem::Iterator => LangItem::Iterator,
-            SolverTraitLangItem::PointeeTrait => LangItem::PointeeTrait,
-            SolverTraitLangItem::Sized => LangItem::Sized,
-            SolverTraitLangItem::MetaSized => LangItem::MetaSized,
-            SolverTraitLangItem::PointeeSized => LangItem::PointeeSized,
-            SolverTraitLangItem::TransmuteTrait => LangItem::TransmuteTrait,
-            SolverTraitLangItem::Tuple => LangItem::Tuple,
-            SolverTraitLangItem::Unpin => LangItem::Unpin,
-            SolverTraitLangItem::Unsize => LangItem::Unsize,
+            SolverTraitLangItem::Future => lang_items.Future,
+            SolverTraitLangItem::Iterator => lang_items.Iterator,
+            SolverTraitLangItem::PointeeTrait => lang_items.PointeeTrait,
+            SolverTraitLangItem::Sized => lang_items.Sized,
+            SolverTraitLangItem::MetaSized => lang_items.MetaSized,
+            SolverTraitLangItem::PointeeSized => lang_items.PointeeSized,
+            SolverTraitLangItem::TransmuteTrait => lang_items.TransmuteTrait,
+            SolverTraitLangItem::Tuple => lang_items.Tuple,
+            SolverTraitLangItem::Unpin => lang_items.Unpin,
+            SolverTraitLangItem::Unsize => lang_items.Unsize,
             SolverTraitLangItem::BikeshedGuaranteedNoDrop => {
                 unimplemented!()
             }
         };
-        lang_item
-            .resolve_trait(self.db(), self.krate.expect("Must have self.krate"))
-            .unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found."))
-            .into()
+        lang_item.expect("Lang item required but not found.").into()
     }
 
     fn require_adt_lang_item(self, lang_item: SolverAdtLangItem) -> AdtIdWrapper {
+        let lang_items = self.lang_items();
         let lang_item = match lang_item {
-            SolverAdtLangItem::Option => LangItem::Option,
-            SolverAdtLangItem::Poll => LangItem::Poll,
+            SolverAdtLangItem::Option => lang_items.Option,
+            SolverAdtLangItem::Poll => lang_items.Poll,
         };
-        lang_item
-            .resolve_adt(self.db(), self.krate.expect("Must have self.krate"))
-            .unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found."))
-            .into()
+        AdtIdWrapper(lang_item.expect("Lang item required but not found.").into())
     }
 
     fn is_lang_item(self, def_id: Self::DefId, lang_item: SolverLangItem) -> bool {
@@ -1514,53 +1543,15 @@ fn is_lang_item(self, def_id: Self::DefId, lang_item: SolverLangItem) -> bool {
     }
 
     fn is_trait_lang_item(self, def_id: Self::TraitId, lang_item: SolverTraitLangItem) -> bool {
-        self.as_trait_lang_item(def_id)
-            .map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item))
-    }
-
-    fn is_adt_lang_item(self, def_id: Self::AdtId, lang_item: SolverAdtLangItem) -> bool {
-        // FIXME: derive PartialEq on SolverTraitLangItem
-        self.as_adt_lang_item(def_id)
-            .map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item))
-    }
-
-    fn as_lang_item(self, def_id: Self::DefId) -> Option<SolverLangItem> {
-        let def_id: AttrDefId = match def_id {
-            SolverDefId::TraitId(id) => id.into(),
-            SolverDefId::TypeAliasId(id) => id.into(),
-            SolverDefId::AdtId(id) => id.into(),
-            _ => panic!("Unexpected SolverDefId in as_lang_item"),
-        };
-        let lang_item = self.db().lang_attr(def_id)?;
-        as_lang_item!(
-            SolverLangItem, lang_item;
-
-            ignore = {
-                AsyncFnKindUpvars,
-            }
-
-            Metadata,
-            DynMetadata,
-            CoroutineReturn,
-            CoroutineYield,
-            FutureOutput,
-            CallRefFuture,
-            CallOnceFuture,
-            AsyncFnOnceOutput,
-        )
-    }
-
-    fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option<SolverTraitLangItem> {
-        let def_id: AttrDefId = def_id.0.into();
-        let lang_item = self.db().lang_attr(def_id)?;
-        as_lang_item!(
-            SolverTraitLangItem, lang_item;
+        is_lang_item!(
+            SolverTraitLangItem, self, def_id.0, lang_item;
 
             ignore = {
                 AsyncFnKindHelper,
                 AsyncIterator,
                 BikeshedGuaranteedNoDrop,
                 FusedIterator,
+                AsyncFnOnceOutput, // This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver.
             }
 
             Sized,
@@ -1586,15 +1577,101 @@ fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option<SolverTraitLangItem
             AsyncFn,
             AsyncFnMut,
             AsyncFnOnce,
-            AsyncFnOnceOutput,
+        )
+    }
+
+    fn is_adt_lang_item(self, def_id: Self::AdtId, lang_item: SolverAdtLangItem) -> bool {
+        // FIXME: derive PartialEq on SolverAdtLangItem
+        self.as_adt_lang_item(def_id)
+            .map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item))
+    }
+
+    fn as_lang_item(self, def_id: Self::DefId) -> Option<SolverLangItem> {
+        match def_id {
+            SolverDefId::TypeAliasId(id) => {
+                as_lang_item!(
+                    SolverLangItem, self, id;
+
+                    ignore = {
+                        AsyncFnKindUpvars,
+                        DynMetadata,
+                    }
+
+                    Metadata,
+                    CoroutineReturn,
+                    CoroutineYield,
+                    FutureOutput,
+                    CallRefFuture,
+                    CallOnceFuture,
+                    AsyncFnOnceOutput,
+                )
+            }
+            SolverDefId::AdtId(AdtId::StructId(id)) => {
+                as_lang_item!(
+                    SolverLangItem, self, id;
+
+                    ignore = {
+                        AsyncFnKindUpvars,
+                        Metadata,
+                        CoroutineReturn,
+                        CoroutineYield,
+                        FutureOutput,
+                        CallRefFuture,
+                        CallOnceFuture,
+                        AsyncFnOnceOutput,
+                    }
+
+                    DynMetadata,
+                )
+            }
+            _ => panic!("Unexpected SolverDefId in as_lang_item"),
+        }
+    }
+
+    fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option<SolverTraitLangItem> {
+        as_lang_item!(
+            SolverTraitLangItem, self, def_id.0;
+
+            ignore = {
+                AsyncFnKindHelper,
+                AsyncIterator,
+                BikeshedGuaranteedNoDrop,
+                FusedIterator,
+                AsyncFnOnceOutput, // This is incorrectly marked as `SolverTraitLangItem`, and is not used by the solver.
+            }
+
+            Sized,
+            MetaSized,
+            PointeeSized,
+            Unsize,
+            Copy,
+            Clone,
+            DiscriminantKind,
+            PointeeTrait,
+            FnPtrTrait,
+            Drop,
+            Destruct,
+            TransmuteTrait,
+            Fn,
+            FnMut,
+            FnOnce,
+            Future,
+            Coroutine,
+            Unpin,
+            Tuple,
+            Iterator,
+            AsyncFn,
+            AsyncFnMut,
+            AsyncFnOnce,
         )
     }
 
     fn as_adt_lang_item(self, def_id: Self::AdtId) -> Option<SolverAdtLangItem> {
-        let def_id: AttrDefId = def_id.0.into();
-        let lang_item = self.db().lang_attr(def_id)?;
+        let AdtId::EnumId(def_id) = def_id.0 else {
+            panic!("Unexpected SolverDefId in as_adt_lang_item");
+        };
         as_lang_item!(
-            SolverAdtLangItem, lang_item;
+            SolverAdtLangItem, self, def_id;
 
             ignore = {}
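
The interner changes above split construction into `new_no_crate` (interning only) and `new_with` (trait solving, with the crate's lang items resolved up front), and make `lang_items()` panic when the no-crate constructor was used where solving is needed. A minimal sketch of that two-constructor shape, using hypothetical stand-in types:

```rust
struct LangItems;

struct Interner<'db> {
    lang_items: Option<&'db LangItems>,
}

impl<'db> Interner<'db> {
    // For plain interning: no crate, so no lang-item table.
    fn new_no_crate() -> Self {
        Interner { lang_items: None }
    }

    // For trait solving: the caller supplies the crate's resolved lang items.
    fn new_with(lang_items: &'db LangItems) -> Self {
        Interner { lang_items: Some(lang_items) }
    }

    #[track_caller]
    fn lang_items(&self) -> &'db LangItems {
        // Panics only if a `new_no_crate` interner reaches code that needs lang items.
        self.lang_items.expect("constructed with `new_no_crate`, lang items unavailable")
    }
}

fn main() {
    let items = LangItems;
    let solving = Interner::new_with(&items);
    let _ = solving.lang_items();

    let interning_only = Interner::new_no_crate();
    assert!(interning_only.lang_items.is_none());
}
```
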
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
index 58849ce..c0a7c9a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
@@ -5,7 +5,6 @@
 use hir_def::{
     AdtId, HasModule, TypeParamId,
     hir::generics::{TypeOrConstParamData, TypeParamProvenance},
-    lang_item::LangItem,
 };
 use hir_def::{TraitId, type_ref::Rawness};
 use rustc_abi::{Float, Integer, Size};
@@ -620,7 +619,7 @@ pub fn as_builtin(self) -> Option<hir_def::builtin_type::BuiltinType> {
 
     // FIXME: Should this be here?
     pub fn impl_trait_bounds(self, db: &'db dyn HirDatabase) -> Option<Vec<Clause<'db>>> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
 
         match self.kind() {
             TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => Some(
@@ -658,7 +657,7 @@ pub fn impl_trait_bounds(self, db: &'db dyn HirDatabase) -> Option<Vec<Clause<'d
             TyKind::Coroutine(coroutine_id, _args) => {
                 let InternedCoroutine(owner, _) = coroutine_id.0.loc(db);
                 let krate = owner.module(db).krate();
-                if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
+                if let Some(future_trait) = hir_def::lang_item::lang_items(db, krate).Future {
                     // This is only used by type walking.
                     // Parameters will be walked outside, and projection predicate is not used.
                     // So just provide the Future trait.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs
index acf532c..0b84ce1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs
@@ -9,7 +9,7 @@
 use syntax::ast;
 
 use crate::{
-    ImplTraitId,
+    ImplTraitId, InferenceResult,
     db::{HirDatabase, InternedOpaqueTyId},
     lower::{ImplTraitIdx, ImplTraits},
     next_solver::{
@@ -94,7 +94,7 @@ pub(crate) fn rpit_hidden_types<'db>(
     db: &'db dyn HirDatabase,
     function: FunctionId,
 ) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
-    let infer = db.infer(function.into());
+    let infer = InferenceResult::for_body(db, function.into());
     let mut result = ArenaMap::new();
     for (opaque, hidden_type) in infer.return_position_impl_trait_types(db) {
         result.insert(opaque, EarlyBinder::bind(hidden_type));
@@ -118,7 +118,7 @@ pub(crate) fn tait_hidden_types<'db>(
 
     let loc = type_alias.loc(db);
     let module = loc.module(db);
-    let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+    let interner = DbInterner::new_with(db, module.krate());
     let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
     let mut ocx = ObligationCtxt::new(&infcx);
     let cause = ObligationCause::dummy();
@@ -128,7 +128,7 @@ pub(crate) fn tait_hidden_types<'db>(
 
     let mut result = ArenaMap::with_capacity(taits_count);
     for defining_body in defining_bodies {
-        let infer = db.infer(defining_body);
+        let infer = InferenceResult::for_body(db, defining_body);
         for (&opaque, &hidden_type) in &infer.type_of_opaque {
             let ImplTraitId::TypeAliasImplTrait(opaque_owner, opaque_idx) = opaque.loc(db) else {
                 continue;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs
index 304679d..0241751 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs
@@ -46,7 +46,7 @@ fn specializes_query(
     parent_impl_def_id: ImplId,
 ) -> bool {
     let trait_env = db.trait_environment(specializing_impl_def_id.into());
-    let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
+    let interner = DbInterner::new_with(db, trait_env.krate);
 
     let specializing_impl_signature = db.impl_signature(specializing_impl_def_id);
     let parent_impl_signature = db.impl_signature(parent_impl_def_id);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
index 0a8ed2c..2bd675b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
@@ -1,31 +1,35 @@
 //! Stuff for handling `#[target_feature]` (needed for unsafe check).
 
+use std::borrow::Cow;
 use std::sync::LazyLock;
 
-use hir_def::attr::Attrs;
-use hir_def::tt;
-use intern::{Symbol, sym};
+use hir_def::FunctionId;
+use hir_def::attrs::AttrFlags;
+use intern::Symbol;
 use rustc_hash::{FxHashMap, FxHashSet};
 
+use crate::db::HirDatabase;
+
 #[derive(Debug, Default, Clone)]
-pub struct TargetFeatures {
-    pub(crate) enabled: FxHashSet<Symbol>,
+pub struct TargetFeatures<'db> {
+    pub(crate) enabled: Cow<'db, FxHashSet<Symbol>>,
 }
 
-impl TargetFeatures {
-    pub fn from_attrs(attrs: &Attrs) -> Self {
-        let mut result = TargetFeatures::from_attrs_no_implications(attrs);
+impl<'db> TargetFeatures<'db> {
+    pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
+        let mut result = TargetFeatures::from_fn_no_implications(db, owner);
         result.expand_implications();
         result
     }
 
     fn expand_implications(&mut self) {
         let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
-        let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
+        let enabled = self.enabled.to_mut();
+        let mut queue = enabled.iter().cloned().collect::<Vec<_>>();
         while let Some(feature) = queue.pop() {
             if let Some(implications) = all_implications.get(&feature) {
                 for implication in implications {
-                    if self.enabled.insert(implication.clone()) {
+                    if enabled.insert(implication.clone()) {
                         queue.push(implication.clone());
                     }
                 }
@@ -34,25 +38,9 @@ fn expand_implications(&mut self) {
     }
 
     /// Retrieves the target features from the attributes, and does not expand the target features implied by them.
-    pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
-        let enabled = attrs
-            .by_key(sym::target_feature)
-            .tt_values()
-            .filter_map(|tt| match tt.token_trees().flat_tokens() {
-                [
-                    tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
-                    tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
-                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-                        kind: tt::LitKind::Str,
-                        symbol: features,
-                        ..
-                    })),
-                ] if enable_ident.sym == sym::enable => Some(features),
-                _ => None,
-            })
-            .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
-            .collect();
-        Self { enabled }
+    pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
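+        // The enabled features now come from the `AttrFlags::target_features` query instead of
+        // being re-parsed from the attribute's token trees.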
+        let enabled = AttrFlags::target_features(db, owner);
+        Self { enabled: Cow::Borrowed(enabled) }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index 002d589..1acb0b8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -38,7 +38,6 @@
 
 use crate::{
     InferenceResult,
-    db::HirDatabase,
     display::{DisplayTarget, HirDisplay},
     infer::{Adjustment, TypeMismatch},
     next_solver::Ty,
@@ -148,7 +147,7 @@ fn check_impl(
         for (def, krate) in defs {
             let display_target = DisplayTarget::from_crate(&db, krate);
             let (body, body_source_map) = db.body_with_source_map(def);
-            let inference_result = db.infer(def);
+            let inference_result = InferenceResult::for_body(&db, def);
 
             for (pat, mut ty) in inference_result.type_of_pat.iter() {
                 if let Pat::Bind { id, .. } = body[pat] {
@@ -319,7 +318,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     crate::attach_db(&db, || {
         let mut buf = String::new();
 
-        let mut infer_def = |inference_result: Arc<InferenceResult<'_>>,
+        let mut infer_def = |inference_result: &InferenceResult<'_>,
                              body: Arc<Body>,
                              body_source_map: Arc<BodySourceMap>,
                              krate: Crate| {
@@ -443,7 +442,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
         });
         for (def, krate) in defs {
             let (body, source_map) = db.body_with_source_map(def);
-            let infer = db.infer(def);
+            let infer = InferenceResult::for_body(&db, def);
             infer_def(infer, body, source_map, krate);
         }
 
@@ -595,13 +594,16 @@ fn main() {
         let module = db.module_for_file(pos.file_id.file_id(&db));
         let crate_def_map = module.def_map(&db);
         visit_module(&db, crate_def_map, module.local_id, &mut |def| {
-            db.infer(match def {
-                ModuleDefId::FunctionId(it) => it.into(),
-                ModuleDefId::EnumVariantId(it) => it.into(),
-                ModuleDefId::ConstId(it) => it.into(),
-                ModuleDefId::StaticId(it) => it.into(),
-                _ => return,
-            });
+            InferenceResult::for_body(
+                &db,
+                match def {
+                    ModuleDefId::FunctionId(it) => it.into(),
+                    ModuleDefId::EnumVariantId(it) => it.into(),
+                    ModuleDefId::ConstId(it) => it.into(),
+                    ModuleDefId::StaticId(it) => it.into(),
+                    _ => return,
+                },
+            );
         });
     });
 
@@ -636,13 +638,16 @@ fn main() {
         let module = db.module_for_file(pos.file_id.file_id(&db));
         let crate_def_map = module.def_map(&db);
         visit_module(&db, crate_def_map, module.local_id, &mut |def| {
-            db.infer(match def {
-                ModuleDefId::FunctionId(it) => it.into(),
-                ModuleDefId::EnumVariantId(it) => it.into(),
-                ModuleDefId::ConstId(it) => it.into(),
-                ModuleDefId::StaticId(it) => it.into(),
-                _ => return,
-            });
+            InferenceResult::for_body(
+                &db,
+                match def {
+                    ModuleDefId::FunctionId(it) => it.into(),
+                    ModuleDefId::EnumVariantId(it) => it.into(),
+                    ModuleDefId::ConstId(it) => it.into(),
+                    ModuleDefId::StaticId(it) => it.into(),
+                    _ => return,
+                },
+            );
         });
     })
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
index 8425c0d..ef71681 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
@@ -7,6 +7,7 @@
 use test_fixture::WithFixture;
 
 use crate::{
+    InferenceResult,
     db::HirDatabase,
     display::{DisplayTarget, HirDisplay},
     mir::MirSpan,
@@ -34,7 +35,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
                 hir_def::ModuleDefId::StaticId(it) => it.into(),
                 _ => continue,
             };
-            let infer = db.infer(def);
+            let infer = InferenceResult::for_body(&db, def);
             let db = &db;
             captures_info.extend(infer.closure_info.iter().flat_map(
                 |(closure_id, (captures, _))| {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
index e98e5e4..fd564a3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -4,7 +4,7 @@
 use salsa::EventKind;
 use test_fixture::WithFixture;
 
-use crate::{db::HirDatabase, method_resolution::TraitImpls, test_db::TestDB};
+use crate::{InferenceResult, method_resolution::TraitImpls, test_db::TestDB};
 
 use super::visit_module;
 
@@ -24,32 +24,30 @@ fn foo() -> i32 {
             let crate_def_map = module.def_map(&db);
             visit_module(&db, crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
-                    db.infer(it.into());
+                    InferenceResult::for_body(&db, it.into());
                 }
             });
         },
-        &[("infer_shim", 1)],
+        &[("InferenceResult < 'db >::for_body_", 1)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
                 "parse_shim",
                 "real_span_map_shim",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "body_shim",
                 "body_with_source_map_shim",
                 "trait_environment_shim",
-                "GenericPredicates < 'db >::query_with_diagnostics_",
-                "lang_item",
+                "lang_items",
                 "crate_lang_items",
+                "GenericPredicates < 'db >::query_with_diagnostics_",
                 "ImplTraits < 'db >::return_type_impl_traits_",
                 "expr_scopes_shim",
-                "lang_item",
             ]
         "#]],
     );
@@ -70,18 +68,18 @@ fn foo() -> i32 {
             let crate_def_map = module.def_map(&db);
             visit_module(&db, crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
-                    db.infer(it.into());
+                    InferenceResult::for_body(&db, it.into());
                 }
             });
         },
-        &[("infer_shim", 0)],
+        &[("InferenceResult < 'db >::for_body_", 0)],
         expect_test::expect![[r#"
             [
                 "parse_shim",
                 "ast_id_map_shim",
                 "file_item_tree_query",
                 "real_span_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
@@ -113,35 +111,33 @@ fn baz() -> i32 {
             let crate_def_map = module.def_map(&db);
             visit_module(&db, crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
-                    db.infer(it.into());
+                    InferenceResult::for_body(&db, it.into());
                 }
             });
         },
-        &[("infer_shim", 3)],
+        &[("InferenceResult < 'db >::for_body_", 3)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
                 "parse_shim",
                 "real_span_map_shim",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "body_shim",
                 "body_with_source_map_shim",
                 "trait_environment_shim",
-                "GenericPredicates < 'db >::query_with_diagnostics_",
-                "lang_item",
+                "lang_items",
                 "crate_lang_items",
-                "attrs_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
+                "GenericPredicates < 'db >::query_with_diagnostics_",
                 "ImplTraits < 'db >::return_type_impl_traits_",
                 "expr_scopes_shim",
-                "lang_item",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
                 "body_shim",
@@ -150,7 +146,7 @@ fn baz() -> i32 {
                 "GenericPredicates < 'db >::query_with_diagnostics_",
                 "ImplTraits < 'db >::return_type_impl_traits_",
                 "expr_scopes_shim",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
                 "body_shim",
@@ -184,29 +180,29 @@ fn baz() -> i32 {
             let crate_def_map = module.def_map(&db);
             visit_module(&db, crate_def_map, module.local_id, &mut |def| {
                 if let ModuleDefId::FunctionId(it) = def {
-                    db.infer(it.into());
+                    InferenceResult::for_body(&db, it.into());
                 }
             });
         },
-        &[("infer_shim", 1)],
+        &[("InferenceResult < 'db >::for_body_", 1)],
         expect_test::expect![[r#"
             [
                 "parse_shim",
                 "ast_id_map_shim",
                 "file_item_tree_query",
                 "real_span_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
                 "body_shim",
-                "attrs_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
                 "body_shim",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "expr_scopes_shim",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
@@ -241,7 +237,6 @@ fn bar() -> f32 {
         &[("TraitImpls::for_crate_", 1)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -313,7 +308,6 @@ fn bar() -> f32 {
         &[("TraitImpls::for_crate_", 1)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -386,7 +380,6 @@ fn bar() -> f32 {
         &[("TraitImpls::for_crate_", 1)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -460,7 +453,6 @@ pub struct SomeStruct {
         &[("TraitImpls::for_crate_", 1)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -509,14 +501,20 @@ pub fn new(value: i32) -> Self {
                 "real_span_map_shim",
                 "crate_local_def_map",
                 "TraitImpls::for_crate_",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "impl_trait_with_diagnostics_shim",
                 "impl_signature_shim",
                 "impl_signature_with_source_map_shim",
+                "lang_items",
+                "crate_lang_items",
+                "ImplItems::of_",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
                 "impl_self_ty_with_diagnostics_shim",
                 "struct_signature_shim",
                 "struct_signature_with_source_map_shim",
-                "attrs_shim",
             ]
         "#]],
     );
@@ -560,13 +558,12 @@ fn main() {
             });
 
             for def in defs {
-                let _inference_result = db.infer(def);
+                let _inference_result = InferenceResult::for_body(&db, def);
             }
         },
         &[("trait_solve_shim", 0)],
         expect_test::expect![[r#"
             [
-                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -575,26 +572,26 @@ fn main() {
                 "TraitItems::query_with_diagnostics_",
                 "body_shim",
                 "body_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "ImplItems::of_",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "trait_signature_shim",
                 "trait_signature_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "body_shim",
                 "body_with_source_map_shim",
                 "trait_environment_shim",
-                "GenericPredicates < 'db >::query_with_diagnostics_",
-                "lang_item",
+                "lang_items",
                 "crate_lang_items",
-                "attrs_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
+                "GenericPredicates < 'db >::query_with_diagnostics_",
                 "GenericPredicates < 'db >::query_with_diagnostics_",
                 "ImplTraits < 'db >::return_type_impl_traits_",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
                 "trait_environment_shim",
@@ -607,7 +604,6 @@ fn main() {
                 "value_ty_shim",
                 "VariantFields::firewall_",
                 "VariantFields::query_",
-                "lang_item",
                 "InherentImpls::for_crate_",
                 "impl_signature_shim",
                 "impl_signature_with_source_map_shim",
@@ -617,7 +613,6 @@ fn main() {
                 "impl_trait_with_diagnostics_shim",
                 "impl_self_ty_with_diagnostics_shim",
                 "GenericPredicates < 'db >::query_with_diagnostics_",
-                "lang_item",
             ]
         "#]],
     );
@@ -660,7 +655,7 @@ fn main() {
             });
 
             for def in defs {
-                let _inference_result = db.infer(def);
+                let _inference_result = InferenceResult::for_body(&db, def);
             }
         },
         &[("trait_solve_shim", 0)],
@@ -673,25 +668,25 @@ fn main() {
                 "crate_local_def_map",
                 "TraitItems::query_with_diagnostics_",
                 "body_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "body_shim",
                 "ImplItems::of_",
-                "infer_shim",
-                "attrs_shim",
+                "InferenceResult < 'db >::for_body_",
+                "AttrFlags::query_",
                 "trait_signature_with_source_map_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
                 "body_shim",
-                "GenericPredicates < 'db >::query_with_diagnostics_",
                 "crate_lang_items",
-                "attrs_shim",
-                "attrs_shim",
-                "attrs_shim",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
+                "AttrFlags::query_",
+                "GenericPredicates < 'db >::query_with_diagnostics_",
                 "GenericPredicates < 'db >::query_with_diagnostics_",
                 "ImplTraits < 'db >::return_type_impl_traits_",
-                "infer_shim",
+                "InferenceResult < 'db >::for_body_",
                 "function_signature_with_source_map_shim",
                 "GenericPredicates < 'db >::query_with_diagnostics_",
                 "ImplTraits < 'db >::return_type_impl_traits_",
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index eb4ae5e..677e3577 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -4134,7 +4134,7 @@ trait Trait {
 }
 
 fn f(t: &dyn Trait<T = (), T = ()>) {}
-   //^&'? {unknown}
+   //^&'? (dyn Trait<T = ()> + 'static)
         "#,
     );
 }
@@ -5056,3 +5056,26 @@ fn token<A>(self, alloc: A)
     "#,
     );
 }
+
+#[test]
+fn dyn_trait_supertrait_projections_are_elaborated() {
+    check_types(
+        r#"
+//- minicore: deref, sized, unsize, coerce_unsized, dispatch_from_dyn
+use core::ops::Deref;
+
+struct Base;
+
+impl Base {
+    fn func(&self) -> i32 { 111 }
+}
+
+trait BaseLayerOne: Deref<Target = Base> {}
+
+fn foo(base_layer_two: &dyn BaseLayerOne) {
+    let _r = base_layer_two.func();
+     // ^^ i32
+}
+    "#,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index 2055c31..1462f1e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -1,12 +1,11 @@
 //! Trait solving using next trait solver.
 
-use core::fmt;
 use std::hash::Hash;
 
 use base_db::Crate;
 use hir_def::{
     AdtId, AssocItemId, BlockId, HasModule, ImplId, Lookup, TraitId,
-    lang_item::LangItem,
+    lang_item::LangItems,
     nameres::DefMap,
     signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
 };
@@ -152,7 +151,7 @@ pub fn next_trait_solve_in_ctxt<'db, 'a>(
     res
 }
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, salsa::Update)]
 pub enum FnTrait {
     // Warning: Order is important. If something implements `x` it should also implement
     // `y` if `y <= x`.
@@ -165,54 +164,7 @@ pub enum FnTrait {
     AsyncFn,
 }
 
-impl fmt::Display for FnTrait {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            FnTrait::FnOnce => write!(f, "FnOnce"),
-            FnTrait::FnMut => write!(f, "FnMut"),
-            FnTrait::Fn => write!(f, "Fn"),
-            FnTrait::AsyncFnOnce => write!(f, "AsyncFnOnce"),
-            FnTrait::AsyncFnMut => write!(f, "AsyncFnMut"),
-            FnTrait::AsyncFn => write!(f, "AsyncFn"),
-        }
-    }
-}
-
 impl FnTrait {
-    pub const fn function_name(&self) -> &'static str {
-        match self {
-            FnTrait::FnOnce => "call_once",
-            FnTrait::FnMut => "call_mut",
-            FnTrait::Fn => "call",
-            FnTrait::AsyncFnOnce => "async_call_once",
-            FnTrait::AsyncFnMut => "async_call_mut",
-            FnTrait::AsyncFn => "async_call",
-        }
-    }
-
-    const fn lang_item(self) -> LangItem {
-        match self {
-            FnTrait::FnOnce => LangItem::FnOnce,
-            FnTrait::FnMut => LangItem::FnMut,
-            FnTrait::Fn => LangItem::Fn,
-            FnTrait::AsyncFnOnce => LangItem::AsyncFnOnce,
-            FnTrait::AsyncFnMut => LangItem::AsyncFnMut,
-            FnTrait::AsyncFn => LangItem::AsyncFn,
-        }
-    }
-
-    pub const fn from_lang_item(lang_item: LangItem) -> Option<Self> {
-        match lang_item {
-            LangItem::FnOnce => Some(FnTrait::FnOnce),
-            LangItem::FnMut => Some(FnTrait::FnMut),
-            LangItem::Fn => Some(FnTrait::Fn),
-            LangItem::AsyncFnOnce => Some(FnTrait::AsyncFnOnce),
-            LangItem::AsyncFnMut => Some(FnTrait::AsyncFnMut),
-            LangItem::AsyncFn => Some(FnTrait::AsyncFn),
-            _ => None,
-        }
-    }
-
     pub fn method_name(self) -> Name {
         match self {
             FnTrait::FnOnce => Name::new_symbol_root(sym::call_once),
@@ -224,8 +176,15 @@ pub fn method_name(self) -> Name {
         }
     }
 
-    pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<TraitId> {
-        self.lang_item().resolve_trait(db, krate)
+    pub fn get_id(self, lang_items: &LangItems) -> Option<TraitId> {
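+        // Each field of `LangItems` is an `Option<TraitId>`, so this returns `None` when the
+        // corresponding lang item is not defined.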
+        match self {
+            FnTrait::FnOnce => lang_items.FnOnce,
+            FnTrait::FnMut => lang_items.FnMut,
+            FnTrait::Fn => lang_items.Fn,
+            FnTrait::AsyncFnOnce => lang_items.AsyncFnOnce,
+            FnTrait::AsyncFnMut => lang_items.AsyncFnMut,
+            FnTrait::AsyncFn => lang_items.AsyncFn,
+        }
     }
 }
 
@@ -257,7 +216,7 @@ fn implements_trait_unique_impl<'db>(
     trait_: TraitId,
     create_args: &mut dyn FnMut(&InferCtxt<'db>) -> GenericArgs<'db>,
 ) -> bool {
-    let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+    let interner = DbInterner::new_with(db, env.krate);
     // FIXME(next-solver): I believe this should be `PostAnalysis`.
     let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index ca5e33f..7dd73f1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -3,15 +3,13 @@
 
 use std::cell::LazyCell;
 
-use base_db::{
-    Crate,
-    target::{self, TargetData},
-};
+use base_db::target::{self, TargetData};
 use hir_def::{
     EnumId, EnumVariantId, FunctionId, Lookup, TraitId,
+    attrs::AttrFlags,
     db::DefDatabase,
     hir::generics::WherePredicate,
-    lang_item::LangItem,
+    lang_item::LangItems,
     resolver::{HasResolver, TypeNs},
     type_ref::{TraitBoundModifier, TypeRef},
 };
@@ -27,10 +25,28 @@
     mir::pad16,
 };
 
-pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator<Item = TraitId> + '_ {
-    [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce]
-        .into_iter()
-        .filter_map(move |lang| lang.resolve_trait(db, krate))
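+/// Writes `new_value` through `old_pointer` only when it differs from the current value,
+/// returning whether a write happened.
+///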
+/// SAFETY: `old_pointer` must be valid for unique writes
+pub(crate) unsafe fn unsafe_update_eq<T>(old_pointer: *mut T, new_value: T) -> bool
+where
+    T: PartialEq,
+{
+    // SAFETY: Caller obligation
+    let old_ref: &mut T = unsafe { &mut *old_pointer };
+
+    if *old_ref != new_value {
+        *old_ref = new_value;
+        true
+    } else {
+        // Subtle but important: `PartialEq` impls can be buggy or define equality
+        // in surprising ways. If the impl says the value has not changed, we leave
+        // the existing value untouched and thus do not have to bump the revision,
+        // since downstream code will never observe a new value.
+        false
+    }
+}
+
+pub(crate) fn fn_traits(lang_items: &LangItems) -> impl Iterator<Item = TraitId> + '_ {
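+    // `flatten` drops whichever of the three traits has no lang item defined.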
+    [lang_items.Fn, lang_items.FnMut, lang_items.FnOnce].into_iter().flatten()
 }
 
 /// Returns an iterator over the direct super traits (including the trait itself).
@@ -119,7 +135,7 @@ pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsS
 pub fn is_fn_unsafe_to_call(
     db: &dyn HirDatabase,
     func: FunctionId,
-    caller_target_features: &TargetFeatures,
+    caller_target_features: &TargetFeatures<'_>,
     call_edition: Edition,
     target_feature_is_safe: TargetFeatureIsSafeInTarget,
 ) -> Unsafety {
@@ -130,8 +146,7 @@ pub fn is_fn_unsafe_to_call(
 
     if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No {
         // RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
-        let callee_target_features =
-            TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
+        let callee_target_features = TargetFeatures::from_fn_no_implications(db, func);
         if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
             return Unsafety::Unsafe;
         }
@@ -152,7 +167,7 @@ pub fn is_fn_unsafe_to_call(
             if is_intrinsic_block {
                 // legacy intrinsics
                 // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
-                if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() {
+                if AttrFlags::query(db, func.into()).contains(AttrFlags::RUSTC_SAFE_INTRINSIC) {
                     Unsafety::Safe
                 } else {
                     Unsafety::Unsafe
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
index b57bf03..df9d53f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
@@ -32,7 +32,7 @@
 
 pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> VariancesOf<'_> {
     tracing::debug!("variances_of(def={:?})", def);
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     match def {
         GenericDefId::FunctionId(_) => (),
         GenericDefId::AdtId(adt) => {
@@ -107,7 +107,7 @@ pub(crate) fn variances_of_cycle_initial(
     db: &dyn HirDatabase,
     def: GenericDefId,
 ) -> VariancesOf<'_> {
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let generics = generics(db, def);
     let count = generics.len();
 
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index cfc4080..5e716c6 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -1,8 +1,11 @@
 //! Attributes & documentation for hir types.
 
+use cfg::CfgExpr;
+use either::Either;
 use hir_def::{
-    AssocItemId, AttrDefId, ModuleDefId,
-    attr::AttrsWithOwner,
+    AssocItemId, AttrDefId, FieldId, InternedModuleId, LifetimeParamId, ModuleDefId,
+    TypeOrConstParamId,
+    attrs::{AttrFlags, Docs, IsInnerDoc},
     expr_store::path::Path,
     item_scope::ItemInNs,
     per_ns::Namespace,
@@ -19,35 +22,169 @@
     },
     next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
 };
+use intern::Symbol;
 
 use crate::{
     Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
-    Field, Function, GenericParam, HasCrate, Impl, LifetimeParam, Macro, Module, ModuleDef, Static,
-    Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
+    Field, Function, GenericParam, HasCrate, Impl, LangItem, LifetimeParam, Macro, Module,
+    ModuleDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
 };
 
-pub trait HasAttrs {
-    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
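+/// The kind of item whose attributes are being queried; this determines which `AttrFlags`
+/// query is used.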
+#[derive(Debug, Clone, Copy)]
+pub enum AttrsOwner {
+    AttrDef(AttrDefId),
+    Field(FieldId),
+    LifetimeParam(LifetimeParamId),
+    TypeOrConstParam(TypeOrConstParamId),
+}
+
+impl AttrsOwner {
+    #[inline]
+    fn attr_def(&self) -> Option<AttrDefId> {
+        match self {
+            AttrsOwner::AttrDef(it) => Some(*it),
+            _ => None,
+        }
+    }
+}
+
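+/// Attribute information for an item: the `AttrFlags` bitflags plus the owner, so that
+/// follow-up lookups (docs, cfgs, doc aliases, lang items) can be answered on demand.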
+#[derive(Debug, Clone)]
+pub struct AttrsWithOwner {
+    pub(crate) attrs: AttrFlags,
+    owner: AttrsOwner,
+}
+
+impl AttrsWithOwner {
+    fn new(db: &dyn HirDatabase, owner: AttrDefId) -> Self {
+        Self { attrs: AttrFlags::query(db, owner), owner: AttrsOwner::AttrDef(owner) }
+    }
+
+    fn new_field(db: &dyn HirDatabase, owner: FieldId) -> Self {
+        Self { attrs: AttrFlags::query_field(db, owner), owner: AttrsOwner::Field(owner) }
+    }
+
+    fn new_lifetime_param(db: &dyn HirDatabase, owner: LifetimeParamId) -> Self {
+        Self {
+            attrs: AttrFlags::query_lifetime_param(db, owner),
+            owner: AttrsOwner::LifetimeParam(owner),
+        }
+    }
+    fn new_type_or_const_param(db: &dyn HirDatabase, owner: TypeOrConstParamId) -> Self {
+        Self {
+            attrs: AttrFlags::query_type_or_const_param(db, owner),
+            owner: AttrsOwner::TypeOrConstParam(owner),
+        }
+    }
+
+    #[inline]
+    pub fn is_unstable(&self) -> bool {
+        self.attrs.contains(AttrFlags::IS_UNSTABLE)
+    }
+
+    #[inline]
+    pub fn is_macro_export(&self) -> bool {
+        self.attrs.contains(AttrFlags::IS_MACRO_EXPORT)
+    }
+
+    #[inline]
+    pub fn is_doc_notable_trait(&self) -> bool {
+        self.attrs.contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
+    }
+
+    #[inline]
+    pub fn is_doc_hidden(&self) -> bool {
+        self.attrs.contains(AttrFlags::IS_DOC_HIDDEN)
+    }
+
+    #[inline]
+    pub fn is_deprecated(&self) -> bool {
+        self.attrs.contains(AttrFlags::IS_DEPRECATED)
+    }
+
+    #[inline]
+    pub fn is_non_exhaustive(&self) -> bool {
+        self.attrs.contains(AttrFlags::NON_EXHAUSTIVE)
+    }
+
+    #[inline]
+    pub fn is_test(&self) -> bool {
+        self.attrs.contains(AttrFlags::IS_TEST)
+    }
+
+    #[inline]
+    pub fn lang(&self, db: &dyn HirDatabase) -> Option<LangItem> {
+        self.owner
+            .attr_def()
+            .and_then(|owner| self.attrs.lang_item_with_attrs(db, owner))
+            .and_then(|lang| LangItem::from_symbol(&lang))
+    }
+
+    #[inline]
+    pub fn doc_aliases<'db>(&self, db: &'db dyn HirDatabase) -> &'db [Symbol] {
+        let owner = match self.owner {
+            AttrsOwner::AttrDef(it) => Either::Left(it),
+            AttrsOwner::Field(it) => Either::Right(it),
+            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[],
+        };
+        self.attrs.doc_aliases(db, owner)
+    }
+
+    #[inline]
+    pub fn cfgs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db CfgExpr> {
+        let owner = match self.owner {
+            AttrsOwner::AttrDef(it) => Either::Left(it),
+            AttrsOwner::Field(it) => Either::Right(it),
+            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
+        };
+        self.attrs.cfgs(db, owner)
+    }
+
+    #[inline]
+    pub fn hir_docs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db Docs> {
+        match self.owner {
+            AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
+            AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
+            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
+        }
+    }
+}
+
+pub trait HasAttrs: Sized {
+    #[inline]
+    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+        match self.attr_id(db) {
+            AttrsOwner::AttrDef(it) => AttrsWithOwner::new(db, it),
+            AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it),
+            AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it),
+            AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it),
+        }
+    }
+
     #[doc(hidden)]
-    fn attr_id(self) -> AttrDefId;
+    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner;
+
+    #[inline]
+    fn hir_docs(self, db: &dyn HirDatabase) -> Option<&Docs> {
+        match self.attr_id(db) {
+            AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
+            AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
+            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
+        }
+    }
 }
 
 macro_rules! impl_has_attrs {
     ($(($def:ident, $def_id:ident),)*) => {$(
         impl HasAttrs for $def {
-            fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
-                let def = AttrDefId::$def_id(self.into());
-                AttrsWithOwner::new(db, def)
-            }
-            fn attr_id(self) -> AttrDefId {
-                AttrDefId::$def_id(self.into())
+            #[inline]
+            fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+                AttrsOwner::AttrDef(AttrDefId::$def_id(self.into()))
             }
         }
     )*};
 }
 
 impl_has_attrs![
-    (Field, FieldId),
     (Variant, EnumVariantId),
     (Static, StaticId),
     (Const, ConstId),
@@ -56,8 +193,6 @@ fn attr_id(self) -> AttrDefId {
     (Macro, MacroId),
     (Function, FunctionId),
     (Adt, AdtId),
-    (Module, ModuleId),
-    (GenericParam, GenericParamId),
     (Impl, ImplId),
     (ExternCrateDecl, ExternCrateId),
 ];
@@ -65,11 +200,9 @@ fn attr_id(self) -> AttrDefId {
 macro_rules! impl_has_attrs_enum {
     ($($variant:ident),* for $enum:ident) => {$(
         impl HasAttrs for $variant {
-            fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
-                $enum::$variant(self).attrs(db)
-            }
-            fn attr_id(self) -> AttrDefId {
-                $enum::$variant(self).attr_id()
+            #[inline]
+            fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+                $enum::$variant(self).attr_id(db)
             }
         }
     )*};
@@ -78,30 +211,46 @@ fn attr_id(self) -> AttrDefId {
 impl_has_attrs_enum![Struct, Union, Enum for Adt];
 impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
 
-impl HasAttrs for AssocItem {
-    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+impl HasAttrs for Module {
+    #[inline]
+    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+        AttrsOwner::AttrDef(AttrDefId::ModuleId(InternedModuleId::new(db, self.id)))
+    }
+}
+
+impl HasAttrs for GenericParam {
+    #[inline]
+    fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
         match self {
-            AssocItem::Function(it) => it.attrs(db),
-            AssocItem::Const(it) => it.attrs(db),
-            AssocItem::TypeAlias(it) => it.attrs(db),
+            GenericParam::TypeParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
+            GenericParam::ConstParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
+            GenericParam::LifetimeParam(it) => AttrsOwner::LifetimeParam(it.into()),
         }
     }
-    fn attr_id(self) -> AttrDefId {
+}
+
+impl HasAttrs for AssocItem {
+    #[inline]
+    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
         match self {
-            AssocItem::Function(it) => it.attr_id(),
-            AssocItem::Const(it) => it.attr_id(),
-            AssocItem::TypeAlias(it) => it.attr_id(),
+            AssocItem::Function(it) => it.attr_id(db),
+            AssocItem::Const(it) => it.attr_id(db),
+            AssocItem::TypeAlias(it) => it.attr_id(db),
         }
     }
 }
 
 impl HasAttrs for crate::Crate {
-    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
-        let def = AttrDefId::ModuleId(self.root_module().id);
-        AttrsWithOwner::new(db, def)
+    #[inline]
+    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+        self.root_module().attr_id(db)
     }
-    fn attr_id(self) -> AttrDefId {
-        AttrDefId::ModuleId(self.root_module().id)
+}
+
+impl HasAttrs for Field {
+    #[inline]
+    fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+        AttrsOwner::Field(self.into())
     }
 }
 
@@ -111,21 +260,22 @@ pub fn resolve_doc_path_on(
     def: impl HasAttrs + Copy,
     link: &str,
     ns: Option<Namespace>,
-    is_inner_doc: bool,
+    is_inner_doc: IsInnerDoc,
 ) -> Option<DocLinkDef> {
-    resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc)
+    resolve_doc_path_on_(db, link, def.attr_id(db), ns, is_inner_doc)
 }
 
 fn resolve_doc_path_on_(
     db: &dyn HirDatabase,
     link: &str,
-    attr_id: AttrDefId,
+    attr_id: AttrsOwner,
     ns: Option<Namespace>,
-    is_inner_doc: bool,
+    is_inner_doc: IsInnerDoc,
 ) -> Option<DocLinkDef> {
     let resolver = match attr_id {
-        AttrDefId::ModuleId(it) => {
-            if is_inner_doc {
+        AttrsOwner::AttrDef(AttrDefId::ModuleId(it)) => {
+            let it = it.loc(db);
+            if is_inner_doc.yes() {
                 it.resolver(db)
             } else if let Some(parent) = Module::from(it).parent(db) {
                 parent.id.resolver(db)
@@ -133,20 +283,20 @@ fn resolve_doc_path_on_(
                 it.resolver(db)
             }
         }
-        AttrDefId::FieldId(it) => it.parent.resolver(db),
-        AttrDefId::AdtId(it) => it.resolver(db),
-        AttrDefId::FunctionId(it) => it.resolver(db),
-        AttrDefId::EnumVariantId(it) => it.resolver(db),
-        AttrDefId::StaticId(it) => it.resolver(db),
-        AttrDefId::ConstId(it) => it.resolver(db),
-        AttrDefId::TraitId(it) => it.resolver(db),
-        AttrDefId::TypeAliasId(it) => it.resolver(db),
-        AttrDefId::ImplId(it) => it.resolver(db),
-        AttrDefId::ExternBlockId(it) => it.resolver(db),
-        AttrDefId::UseId(it) => it.resolver(db),
-        AttrDefId::MacroId(it) => it.resolver(db),
-        AttrDefId::ExternCrateId(it) => it.resolver(db),
-        AttrDefId::GenericParamId(_) => return None,
+        AttrsOwner::AttrDef(AttrDefId::AdtId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::FunctionId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::EnumVariantId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::StaticId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::ConstId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::TraitId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::TypeAliasId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::ImplId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::ExternBlockId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::UseId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db),
+        AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db),
+        AttrsOwner::Field(it) => it.parent.resolver(db),
+        AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
     };
 
     let mut modpath = doc_modpath_from_str(link)?;
@@ -271,7 +421,7 @@ fn resolve_impl_trait_item<'db>(
     // attributes here. Use path resolution directly instead.
     //
     // FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
-    let interner = DbInterner::new_with(db, Some(environment.krate), environment.block);
+    let interner = DbInterner::new_with(db, environment.krate);
     let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
     let unstable_features =
         MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map());
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index a6d67e8..6ef6ea2 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -153,8 +153,7 @@ pub struct UnresolvedImport {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct UnresolvedMacroCall {
-    pub macro_call: InFile<SyntaxNodePtr>,
-    pub precise_location: Option<TextRange>,
+    pub range: InFile<TextRange>,
     pub path: ModPath,
     pub is_bang: bool,
 }
@@ -185,8 +184,7 @@ pub struct InactiveCode {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct MacroError {
-    pub node: InFile<SyntaxNodePtr>,
-    pub precise_location: Option<TextRange>,
+    pub range: InFile<TextRange>,
     pub message: String,
     pub error: bool,
     pub kind: &'static str,
@@ -194,8 +192,7 @@ pub struct MacroError {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct MacroExpansionParseError {
-    pub node: InFile<SyntaxNodePtr>,
-    pub precise_location: Option<TextRange>,
+    pub range: InFile<TextRange>,
     pub errors: Arc<[SyntaxError]>,
 }
 
@@ -213,12 +210,12 @@ pub struct UnimplementedBuiltinMacro {
 
 #[derive(Debug)]
 pub struct InvalidDeriveTarget {
-    pub node: InFile<SyntaxNodePtr>,
+    pub range: InFile<TextRange>,
 }
 
 #[derive(Debug)]
 pub struct MalformedDerive {
-    pub node: InFile<SyntaxNodePtr>,
+    pub range: InFile<TextRange>,
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index c215438..10a1fa1 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -6,7 +6,6 @@
     expr_store::ExpressionStore,
     hir::generics::{GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate},
     item_tree::FieldsShape,
-    lang_item::LangItem,
     signatures::{StaticFlags, TraitFlags},
     type_ref::{TypeBound, TypeRef, TypeRefId},
 };
@@ -520,7 +519,7 @@ fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError>
             return Ok(());
         }
 
-        let sized_trait = LangItem::Sized.resolve_trait(f.db, krate);
+        let sized_trait = f.lang_items().Sized;
         let has_only_sized_bound =
             predicates.iter().all(move |pred| match pred.kind().skip_binder() {
                 ClauseKind::Trait(it) => Some(it.def_id().0) == sized_trait,
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 2d70a8d..2146e4d 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -48,17 +48,19 @@
 use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
 use either::Either;
 use hir_def::{
-    AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
-    CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
-    FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+    AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
+    DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId,
+    GenericParamId, HasModule, ImplId, InternedModuleId, ItemContainerId, LifetimeParamId,
     LocalFieldId, Lookup, MacroExpander, MacroId, StaticId, StructId, SyntheticSyntax, TupleId,
     TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+    attrs::AttrFlags,
     expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap},
     hir::{
         BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
         generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
     },
     item_tree::ImportAlias,
+    lang_item::LangItemTarget,
     layout::{self, ReprOptions, TargetDataLayout},
     nameres::{
         assoc::TraitItems,
@@ -66,17 +68,16 @@
     },
     per_ns::PerNs,
     resolver::{HasResolver, Resolver},
-    signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
+    signatures::{EnumSignature, ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
     src::HasSource as _,
     visibility::visibility_from_ast,
 };
 use hir_expand::{
-    AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs,
-    proc_macro::ProcMacroKind,
+    AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind,
 };
 use hir_ty::{
-    GenericPredicates, TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId,
-    all_super_traits, autoderef, check_orphan_rules,
+    GenericPredicates, InferenceResult, TraitEnvironment, TyDefId, TyLoweringDiagnostic,
+    ValueTyDefId, all_super_traits, autoderef, check_orphan_rules,
     consteval::try_const_usize,
     db::{InternedClosureId, InternedCoroutineId},
     diagnostics::BodyValidationDiagnostic,
@@ -91,7 +92,7 @@
         PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode,
         infer::{DbInternerInferExt, InferCtxt},
     },
-    traits::{self, FnTrait, is_inherent_impl_coherent, structurally_normalize_ty},
+    traits::{self, is_inherent_impl_coherent, structurally_normalize_ty},
 };
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
@@ -103,8 +104,8 @@
 use span::{AstIdNode, Edition, FileId};
 use stdx::{format_to, impl_from, never};
 use syntax::{
-    AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
-    ast::{self, HasAttrs as _, HasName, HasVisibility as _},
+    AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
+    ast::{self, HasName, HasVisibility as _},
     format_smolstr,
 };
 use triomphe::{Arc, ThinArc};
@@ -112,7 +113,7 @@
 use crate::db::{DefDatabase, HirDatabase};
 
 pub use crate::{
-    attrs::{HasAttrs, resolve_doc_path_on},
+    attrs::{AttrsWithOwner, HasAttrs, resolve_doc_path_on},
     diagnostics::*,
     has_source::HasSource,
     semantics::{
@@ -135,10 +136,10 @@
     hir_def::{
         Complete,
         FindPathConfig,
-        attr::{AttrSourceMap, Attrs, AttrsWithOwner},
+        attrs::{Docs, IsInnerDoc},
         find_path::PrefixKind,
         import_map,
-        lang_item::{LangItem, crate_lang_items},
+        lang_item::{LangItemEnum as LangItem, crate_lang_items},
         nameres::{DefMap, ModuleSource, crate_def_map},
         per_ns::Namespace,
         type_ref::{Mutability, TypeRef},
@@ -149,7 +150,6 @@
     },
     hir_expand::{
         EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
-        attrs::{Attr, AttrId},
         change::ChangeWithProcMacros,
         files::{
             FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition,
@@ -305,11 +305,10 @@ pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
     }
 
     /// Try to get the root URL of the documentation of a crate.
-    pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
+    pub fn get_html_root_url(self, db: &dyn HirDatabase) -> Option<String> {
         // Look for #![doc(html_root_url = "...")]
-        let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
-        let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url);
-        doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+        let doc_url = AttrFlags::doc_html_root_url(db, self.id);
+        doc_url.as_ref().map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
     }
 
     pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions {
@@ -654,7 +653,7 @@ pub fn diagnostics<'db>(
                 // FIXME: This is accidentally quadratic.
                 continue;
             }
-            emit_def_diagnostic(db, acc, diag, edition);
+            emit_def_diagnostic(db, acc, diag, edition, def_map.krate());
         }
 
         if !self.id.is_block_module() {
@@ -673,8 +672,9 @@ pub fn diagnostics<'db>(
                     acc.extend(def.diagnostics(db, style_lints))
                 }
                 ModuleDef::Trait(t) => {
+                    let krate = t.krate(db);
                     for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() {
-                        emit_def_diagnostic(db, acc, diag, edition);
+                        emit_def_diagnostic(db, acc, diag, edition, krate.id);
                     }
 
                     for item in t.items(db) {
@@ -765,7 +765,7 @@ pub fn diagnostics<'db>(
         }
         self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m));
 
-        let interner = DbInterner::new_with(db, Some(self.id.krate()), self.id.containing_block());
+        let interner = DbInterner::new_with(db, self.id.krate());
         let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
 
         let mut impl_assoc_items_scratch = vec![];
@@ -790,7 +790,7 @@ pub fn diagnostics<'db>(
             let ast_id_map = db.ast_id_map(file_id);
 
             for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() {
-                emit_def_diagnostic(db, acc, diag, edition);
+                emit_def_diagnostic(db, acc, diag, edition, loc.container.krate());
             }
 
             if impl_signature.target_trait.is_none()
@@ -818,26 +818,15 @@ pub fn diagnostics<'db>(
             let drop_maybe_dangle = (|| {
                 // FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper
                 let trait_ = trait_?;
-                let drop_trait = LangItem::Drop.resolve_trait(db, self.krate().into())?;
+                let drop_trait = interner.lang_items().Drop?;
                 if drop_trait != trait_.into() {
                     return None;
                 }
                 let parent = impl_def.id.into();
-                let generic_params = db.generic_params(parent);
-                let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| {
-                    GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
-                });
-                let type_params = generic_params
-                    .iter_type_or_consts()
-                    .filter(|(_, it)| it.type_param().is_some())
-                    .map(|(local_id, _)| {
-                        GenericParamId::TypeParamId(TypeParamId::from_unchecked(
-                            TypeOrConstParamId { parent, local_id },
-                        ))
-                    });
-                let res = type_params.chain(lifetime_params).any(|p| {
-                    db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists()
-                });
+                let (lifetimes_attrs, type_and_consts_attrs) =
+                    AttrFlags::query_generic_params(db, parent);
+                let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE))
+                    || type_and_consts_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE));
                 Some(res)
             })()
             .unwrap_or(false);
@@ -998,6 +987,17 @@ pub fn find_use_path(
     ) -> Option<ModPath> {
         hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
     }
+
+    #[inline]
+    pub fn doc_keyword(self, db: &dyn HirDatabase) -> Option<Symbol> {
+        AttrFlags::doc_keyword(db, InternedModuleId::new(db, self.id))
+    }
+
+    /// Whether this module has a `#[path = "..."]` attribute.
+    #[inline]
+    pub fn has_path(&self, db: &dyn HirDatabase) -> bool {
+        self.attrs(db).attrs.contains(AttrFlags::HAS_PATH)
+    }
 }
 
 fn macro_call_diagnostics<'db>(
@@ -1012,31 +1012,19 @@ fn macro_call_diagnostics<'db>(
     if let Some(err) = err {
         let loc = db.lookup_intern_macro_call(macro_call_id);
         let file_id = loc.kind.file_id();
-        let node =
-            InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
+        let mut range = precise_macro_call_location(&loc.kind, db, loc.krate);
         let RenderedExpandError { message, error, kind } = err.render_to_string(db);
-        let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
-        let precise_location = if editioned_file_id == file_id {
-            Some(
-                err.span().range
-                    + db.ast_id_map(editioned_file_id.into())
-                        .get_erased(err.span().anchor.ast_id)
-                        .text_range()
-                        .start(),
-            )
-        } else {
-            None
-        };
-        acc.push(MacroError { node, precise_location, message, error, kind }.into());
+        if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) {
+            range.value = err.span().range
+                + db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start();
+        }
+        acc.push(MacroError { range, message, error, kind }.into());
     }
 
     if !parse_errors.is_empty() {
         let loc = db.lookup_intern_macro_call(macro_call_id);
-        let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
-        acc.push(
-            MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
-                .into(),
-        )
+        let range = precise_macro_call_location(&loc.kind, db, loc.krate);
+        acc.push(MacroExpansionParseError { range, errors: parse_errors.clone() }.into())
     }
 }
 
@@ -1060,6 +1048,7 @@ fn emit_macro_def_diagnostics<'db>(
             acc,
             &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
             edition,
+            m.krate(db).id,
         );
     }
 }
@@ -1069,8 +1058,9 @@ fn emit_def_diagnostic<'db>(
     acc: &mut Vec<AnyDiagnostic<'db>>,
     diag: &DefDiagnostic,
     edition: Edition,
+    krate: base_db::Crate,
 ) {
-    emit_def_diagnostic_(db, acc, &diag.kind, edition)
+    emit_def_diagnostic_(db, acc, &diag.kind, edition, krate)
 }
 
 fn emit_def_diagnostic_<'db>(
@@ -1078,6 +1068,7 @@ fn emit_def_diagnostic_<'db>(
     acc: &mut Vec<AnyDiagnostic<'db>>,
     diag: &DefDiagnosticKind,
     edition: Edition,
+    krate: base_db::Crate,
 ) {
     match diag {
         DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
@@ -1100,8 +1091,7 @@ fn emit_def_diagnostic_<'db>(
             let RenderedExpandError { message, error, kind } = err.render_to_string(db);
             acc.push(
                 MacroError {
-                    node: InFile::new(ast.file_id, item.syntax_node_ptr()),
-                    precise_location: None,
+                    range: InFile::new(ast.file_id, item.text_range()),
                     message: format!("{}: {message}", path.display(db, edition)),
                     error,
                     kind,
@@ -1131,11 +1121,10 @@ fn emit_def_diagnostic_<'db>(
             );
         }
         DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
-            let (node, precise_location) = precise_macro_call_location(ast, db);
+            let location = precise_macro_call_location(ast, db, krate);
             acc.push(
                 UnresolvedMacroCall {
-                    macro_call: node,
-                    precise_location,
+                    range: location,
                     path: path.clone(),
                     is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
                 }
@@ -1154,34 +1143,12 @@ fn emit_def_diagnostic_<'db>(
             );
         }
         DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
-            let node = ast.to_node(db);
-            let derive = node.attrs().nth(*id);
-            match derive {
-                Some(derive) => {
-                    acc.push(
-                        InvalidDeriveTarget {
-                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
-                        }
-                        .into(),
-                    );
-                }
-                None => stdx::never!("derive diagnostic on item without derive attribute"),
-            }
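+            // Point the diagnostic at the path of the offending derive attribute.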
+            let derive = id.find_attr_range(db, krate, *ast).3.path_range();
+            acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into());
         }
         DefDiagnosticKind::MalformedDerive { ast, id } => {
-            let node = ast.to_node(db);
-            let derive = node.attrs().nth(*id);
-            match derive {
-                Some(derive) => {
-                    acc.push(
-                        MalformedDerive {
-                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
-                        }
-                        .into(),
-                    );
-                }
-                None => stdx::never!("derive diagnostic on item without derive attribute"),
-            }
+            let derive = id.find_attr_range(db, krate, *ast).2;
+            acc.push(MalformedDerive { range: ast.with_value(derive) }.into());
         }
         DefDiagnosticKind::MacroDefError { ast, message } => {
             let node = ast.to_node(db);
@@ -1200,61 +1167,28 @@ fn emit_def_diagnostic_<'db>(
 fn precise_macro_call_location(
     ast: &MacroCallKind,
     db: &dyn HirDatabase,
-) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
+    krate: base_db::Crate,
+) -> InFile<TextRange> {
     // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
     // - e.g. the full attribute for macro errors, but only the name for name resolution
     match ast {
         MacroCallKind::FnLike { ast_id, .. } => {
             let node = ast_id.to_node(db);
-            (
-                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
-                node.path()
-                    .and_then(|it| it.segment())
-                    .and_then(|it| it.name_ref())
-                    .map(|it| it.syntax().text_range()),
-            )
+            let range = node
+                .path()
+                .and_then(|it| it.segment())
+                .and_then(|it| it.name_ref())
+                .map(|it| it.syntax().text_range());
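+            // Fall back to the whole macro call if the path has no name to point at.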
+            let range = range.unwrap_or_else(|| node.syntax().text_range());
+            ast_id.with_value(range)
         }
         MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
-            let node = ast_id.to_node(db);
-            // Compute the precise location of the macro name's token in the derive
-            // list.
-            let token = (|| {
-                let derive_attr = collect_attrs(&node)
-                    .nth(derive_attr_index.ast_index())
-                    .and_then(|x| Either::left(x.1))?;
-                let token_tree = derive_attr.meta()?.token_tree()?;
-                let chunk_by = token_tree
-                    .syntax()
-                    .children_with_tokens()
-                    .filter_map(|elem| match elem {
-                        syntax::NodeOrToken::Token(tok) => Some(tok),
-                        _ => None,
-                    })
-                    .chunk_by(|t| t.kind() == T![,]);
-                let (_, mut group) = chunk_by
-                    .into_iter()
-                    .filter(|&(comma, _)| !comma)
-                    .nth(*derive_index as usize)?;
-                group.find(|t| t.kind() == T![ident])
-            })();
-            (
-                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
-                token.as_ref().map(|tok| tok.text_range()),
-            )
+            let range = derive_attr_index.find_derive_range(db, krate, *ast_id, *derive_index);
+            ast_id.with_value(range)
         }
-        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-            let node = ast_id.to_node(db);
-            let attr = collect_attrs(&node)
-                .nth(invoc_attr_index.ast_index())
-                .and_then(|x| Either::left(x.1))
-                .unwrap_or_else(|| {
-                    panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
-                });
-
-            (
-                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
-                Some(attr.syntax().text_range()),
-            )
+        MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
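+            // Point at the invoking attribute itself.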
+            let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2;
+            ast_id.with_value(attr_range)
         }
     }
 }
@@ -1282,8 +1216,7 @@ pub struct InstantiatedField<'db> {
 impl<'db> InstantiatedField<'db> {
     /// Returns the type as in the signature of the struct.
     pub fn ty(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> {
-        let krate = self.inner.krate(db);
-        let interner = DbInterner::new_with(db, Some(krate.base()), None);
+        let interner = DbInterner::new_no_crate(db);
 
         let var_id = self.inner.parent.into();
         let field = db.field_types(var_id)[self.inner.id];
@@ -1305,9 +1238,8 @@ pub fn name(&self) -> Name {
     }
 
     pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
-        let interner = DbInterner::new_with(db, None, None);
-        let ty = db
-            .infer(self.owner)
+        let interner = DbInterner::new_no_crate(db);
+        let ty = InferenceResult::for_body(db, self.owner)
             .tuple_field_access_type(self.tuple)
             .as_slice()
             .get(self.index as usize)
@@ -1381,7 +1313,7 @@ pub fn ty_with_args<'db>(
             VariantDef::Union(it) => it.id.into(),
             VariantDef::Variant(it) => it.parent_enum(db).id.into(),
         };
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let args = generic_args_from_tys(interner, def_id.into(), generics.map(|ty| ty.ty));
         let ty = db.field_types(var_id)[self.id].instantiate(interner, args);
         Type::new(db, var_id, ty)
@@ -1452,7 +1384,7 @@ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        db.struct_signature(self.id).repr
+        AttrFlags::repr(db, self.id.into())
     }
 
     pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
@@ -1468,7 +1400,7 @@ fn variant_fields(self, db: &dyn HirDatabase) -> &VariantFields {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_unstable()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
     }
 
     pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> {
@@ -1506,8 +1438,7 @@ pub fn fields(self, db: &dyn HirDatabase) -> Vec<InstantiatedField<'db>> {
     }
 
     pub fn ty(self, db: &'db dyn HirDatabase) -> TypeNs<'db> {
-        let krate = self.inner.krate(db);
-        let interner = DbInterner::new_with(db, Some(krate.base()), None);
+        let interner = DbInterner::new_no_crate(db);
 
         let ty = db.ty(self.inner.id.into());
         TypeNs::new(db, self.inner.id, ty.instantiate(interner, self.args))
@@ -1557,7 +1488,7 @@ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
             .collect()
     }
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_unstable()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
     }
 }
 
@@ -1592,7 +1523,7 @@ pub fn num_variants(self, db: &dyn HirDatabase) -> usize {
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        db.enum_signature(self.id).repr
+        AttrFlags::repr(db, self.id.into())
     }
 
     pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
@@ -1605,10 +1536,10 @@ pub fn ty_params<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
 
     /// The type of the enum variant bodies.
     pub fn variant_body_ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         Type::new_for_crate(
             self.id.lookup(db).container.krate(),
-            match db.enum_signature(self.id).variant_body_type() {
+            match EnumSignature::variant_body_type(db, self.id) {
                 layout::IntegerType::Pointer(sign) => match sign {
                     true => Ty::new_int(interner, rustc_type_ir::IntTy::Isize),
                     false => Ty::new_uint(interner, rustc_type_ir::UintTy::Usize),
@@ -1649,7 +1580,7 @@ pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_unstable()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
     }
 }
 
@@ -1669,8 +1600,7 @@ pub struct InstantiatedEnum<'db> {
 
 impl<'db> InstantiatedEnum<'db> {
     pub fn ty(self, db: &'db dyn HirDatabase) -> TypeNs<'db> {
-        let krate = self.inner.krate(db);
-        let interner = DbInterner::new_with(db, Some(krate.base()), None);
+        let interner = DbInterner::new_no_crate(db);
 
         let ty = db.ty(self.inner.id.into());
         TypeNs::new(db, self.inner.id, ty.instantiate(interner, self.args))
@@ -1750,7 +1680,7 @@ pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_unstable()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
     }
 
     pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> {
@@ -1816,7 +1746,7 @@ pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
 
     pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
         let env = db.trait_environment(self.into());
-        let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+        let interner = DbInterner::new_no_crate(db);
         let adt_id = AdtId::from(self);
         let args = GenericArgs::for_item_with_defaults(interner, adt_id.into(), |_, id, _| {
             GenericArg::error_from_id(interner, id)
@@ -1841,7 +1771,7 @@ pub fn ty_with_args<'db>(
         args: impl IntoIterator<Item = Type<'db>>,
     ) -> Type<'db> {
         let id = AdtId::from(self);
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let ty = Ty::new_adt(
             interner,
             id,
@@ -2025,7 +1955,7 @@ pub fn diagnostics<'db>(
 
         expr_store_diagnostics(db, acc, &source_map);
 
-        let infer = db.infer(self.into());
+        let infer = InferenceResult::for_body(db, self.into());
         for d in infer.diagnostics() {
             acc.extend(AnyDiagnostic::inference_diagnostic(
                 db,
@@ -2235,8 +2165,7 @@ fn expr_store_diagnostics<'db>(
                 InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
             }
             ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
-                macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
-                precise_location: None,
+                range: node.map(|ptr| ptr.text_range()),
                 path: path.clone(),
                 is_bang: true,
             }
@@ -2277,7 +2206,7 @@ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
 
     pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type<'_> {
         let resolver = self.id.resolver(db);
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s.
         let callable_sig = db.callable_item_signature(self.id.into()).instantiate_identity();
         let ty = Ty::new_fn_ptr(interner, callable_sig);
@@ -2305,10 +2234,10 @@ pub fn ret_type_with_args<'db>(
         generics: impl Iterator<Item = Type<'db>>,
     ) -> Type<'db> {
         let resolver = self.id.resolver(db);
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let args = generic_args_from_tys(interner, self.id.into(), generics.map(|ty| ty.ty));
 
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let ty = db
             .callable_item_signature(self.id.into())
             .instantiate(interner, args)
@@ -2396,7 +2325,7 @@ pub fn params_without_self_with_args<'db>(
         generics: impl Iterator<Item = Type<'db>>,
     ) -> Vec<Param<'db>> {
         let environment = db.trait_environment(self.id.into());
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let args = generic_args_from_tys(interner, self.id.into(), generics.map(|ty| ty.ty));
         let callable_sig =
             db.callable_item_signature(self.id.into()).instantiate(interner, args).skip_binder();
@@ -2439,11 +2368,11 @@ pub fn returns_impl_future(self, db: &dyn HirDatabase) -> bool {
 
         let ret_type = self.ret_type(db);
         let Some(impl_traits) = ret_type.as_impl_traits(db) else { return false };
-        let Some(future_trait_id) = LangItem::Future.resolve_trait(db, self.ty(db).env.krate)
-        else {
+        let lang_items = hir_def::lang_item::lang_items(db, self.krate(db).id);
+        let Some(future_trait_id) = lang_items.Future else {
             return false;
         };
-        let Some(sized_trait_id) = LangItem::Sized.resolve_trait(db, self.ty(db).env.krate) else {
+        let Some(sized_trait_id) = lang_items.Sized else {
             return false;
         };
 
@@ -2461,33 +2390,33 @@ pub fn returns_impl_future(self, db: &dyn HirDatabase) -> bool {
 
     /// Does this function have `#[test]` attribute?
     pub fn is_test(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_test()
+        self.attrs(db).is_test()
     }
 
     /// is this a `fn main` or a function with an `export_name` of `main`?
     pub fn is_main(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).export_name() == Some(&sym::main)
+        self.exported_main(db)
             || self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main
     }
 
     /// Is this a function with an `export_name` of `main`?
     pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).export_name() == Some(&sym::main)
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN)
     }
 
     /// Does this function have the ignore attribute?
     pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_ignore()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE)
     }
 
     /// Does this function have `#[bench]` attribute?
     pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_bench()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH)
     }
 
     /// Is this function marked as unstable with `#[feature]` attribute?
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(self.id.into()).is_unstable()
+        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
     }
 
     pub fn is_unsafe_to_call(
@@ -2498,8 +2427,7 @@ pub fn is_unsafe_to_call(
     ) -> bool {
         let (target_features, target_feature_is_safe_in_target) = caller
             .map(|caller| {
-                let target_features =
-                    hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into()));
+                let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id);
                 let target_feature_is_safe_in_target =
                     match &caller.krate(db).id.workspace_data(db).target {
                         Ok(target) => hir_ty::target_feature_is_safe_in_target(target),
@@ -2530,14 +2458,6 @@ pub fn has_body(self, db: &dyn HirDatabase) -> bool {
     }
 
     pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
-        let attrs = db.attrs(self.id.into());
-        // FIXME: Store this in FunctionData flags?
-        if !(attrs.is_proc_macro()
-            || attrs.is_proc_macro_attribute()
-            || attrs.is_proc_macro_derive())
-        {
-            return None;
-        }
         let def_map = crate_def_map(db, HasModule::krate(&self.id, db));
         def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
     }
@@ -2547,7 +2467,7 @@ pub fn eval(
         db: &dyn HirDatabase,
         span_formatter: impl Fn(FileId, TextRange) -> String,
     ) -> Result<String, ConstEvalError<'_>> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let body = db.monomorphized_mir_body(
             self.id.into(),
             GenericArgs::new_from_iter(interner, []),
@@ -2704,7 +2624,7 @@ pub fn ty_with_args<'db>(
         db: &'db dyn HirDatabase,
         generics: impl Iterator<Item = Type<'db>>,
     ) -> Type<'db> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let args = generic_args_from_tys(interner, self.func.into(), generics.map(|ty| ty.ty));
         let callable_sig =
             db.callable_item_signature(self.func.into()).instantiate(interner, args).skip_binder();
@@ -2804,7 +2724,7 @@ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
 
     /// Evaluate the constant.
     pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError<'_>> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity();
         db.const_eval(self.id, GenericArgs::new_from_iter(interner, []), None)
             .map(|it| EvaluatedConst { const_: it, def: self.id.into(), ty })
@@ -2908,8 +2828,12 @@ pub struct Trait {
 }
 
 impl Trait {
-    pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
-        LangItem::from_name(name)?.resolve_trait(db, krate.into()).map(Into::into)
+    pub fn lang(db: &dyn HirDatabase, krate: Crate, lang_item: LangItem) -> Option<Trait> {
+        let lang_items = hir_def::lang_item::lang_items(db, krate.id);
+        match lang_item.from_lang_items(lang_items)? {
+            LangItemTarget::TraitId(it) => Some(it.into()),
+            _ => None,
+        }
     }
 
     pub fn module(self, db: &dyn HirDatabase) -> Module {
@@ -2992,7 +2916,7 @@ fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, Macro
 
     /// `#[rust_analyzer::completions(...)]` mode.
     pub fn complete(self, db: &dyn HirDatabase) -> Complete {
-        Complete::extract(true, &self.attrs(db))
+        Complete::extract(true, self.attrs(db).attrs)
     }
 }
 
@@ -3076,7 +3000,7 @@ pub fn i32() -> BuiltinType {
 
     pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
         let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]);
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         Type::new_for_crate(core, Ty::from_builtin_type(interner, self.inner))
     }
 
@@ -3163,10 +3087,10 @@ pub fn name(self, db: &dyn HirDatabase) -> Name {
                 let loc = id.lookup(db);
                 let source = loc.source(db);
                 match loc.kind {
-                    ProcMacroKind::CustomDerive => db
-                        .attrs(id.into())
-                        .parse_proc_macro_derive()
-                        .map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it),
+                    ProcMacroKind::CustomDerive => AttrFlags::derive_info(db, self.id).map_or_else(
+                        || as_name_opt(source.value.name()),
+                        |info| Name::new_symbol_root(info.trait_name.clone()),
+                    ),
                     ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()),
                 }
             }
@@ -3174,7 +3098,7 @@ pub fn name(self, db: &dyn HirDatabase) -> Name {
     }
 
     pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
-        matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists())
+        matches!(self.id, MacroId::MacroRulesId(_) if AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_MACRO_EXPORT))
     }
 
     pub fn is_proc_macro(self) -> bool {
@@ -3919,7 +3843,7 @@ pub fn module(self, db: &dyn HirDatabase) -> Module {
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
         let def = self.parent;
-        let infer = db.infer(def);
+        let infer = InferenceResult::for_body(db, def);
         let ty = infer[self.binding_id];
         Type::new(db, def, ty)
     }
@@ -4007,18 +3931,10 @@ pub fn derive(&self) -> Macro {
     }
 
     pub fn name(&self, db: &dyn HirDatabase) -> Name {
-        match self.derive {
-            makro @ MacroId::Macro2Id(_) => db
-                .attrs(makro.into())
-                .parse_rustc_builtin_macro()
-                .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
-            MacroId::MacroRulesId(_) => None,
-            makro @ MacroId::ProcMacroId(_) => db
-                .attrs(makro.into())
-                .parse_proc_macro_derive()
-                .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
-        }
-        .unwrap_or_else(Name::missing)
+        AttrFlags::derive_info(db, self.derive)
+            .and_then(|it| it.helpers.get(self.idx as usize))
+            .map(|helper| Name::new_symbol_root(helper.clone()))
+            .unwrap_or_else(Name::missing)
     }
 }
 
@@ -4208,7 +4124,7 @@ pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
 
     pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
         let resolver = self.id.parent().resolver(db);
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let index = hir_ty::param_idx(db, self.id.into()).unwrap();
         let ty = Ty::new_param(interner, self.id, index as u32);
         Type::new_with_resolver_inner(db, &resolver, ty)
@@ -4242,7 +4158,7 @@ pub fn default(self, db: &dyn HirDatabase) -> Option<Type<'_>> {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        db.attrs(GenericParamId::from(self.id).into()).is_unstable()
+        self.attrs(db).is_unstable()
     }
 }
 
@@ -4412,9 +4328,12 @@ pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> {
     /// blanket impls, and only does a shallow type constructor check. In fact, this should've probably been on `Adt`
     /// etc., and not on `Type`. If you would want to create a precise list of all impls applying to a type,
     /// you would need to include blanket impls, and try to prove to predicates for each candidate.
-    pub fn all_for_type<'db>(db: &'db dyn HirDatabase, Type { ty, env }: Type<'db>) -> Vec<Impl> {
+    pub fn all_for_type<'db>(
+        db: &'db dyn HirDatabase,
+        Type { ty, env: _ }: Type<'db>,
+    ) -> Vec<Impl> {
         let mut result = Vec::new();
-        let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+        let interner = DbInterner::new_no_crate(db);
         let Some(simplified_ty) =
             fast_reject::simplify_type(interner, ty, fast_reject::TreatParams::AsRigid)
         else {
@@ -4591,7 +4510,7 @@ pub struct Closure<'db> {
 
 impl<'db> Closure<'db> {
     fn as_ty(&self, db: &'db dyn HirDatabase) -> Ty<'db> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         match self.id {
             AnyClosureId::ClosureId(id) => Ty::new_closure(interner, id.into(), self.subst),
             AnyClosureId::CoroutineClosureId(id) => {
@@ -4620,7 +4539,7 @@ pub fn captured_items(&self, db: &'db dyn HirDatabase) -> Vec<ClosureCapture<'db
             return Vec::new();
         };
         let owner = db.lookup_intern_closure(id).0;
-        let infer = db.infer(owner);
+        let infer = InferenceResult::for_body(db, owner);
         let info = infer.closure_info(id);
         info.0
             .iter()
@@ -4635,7 +4554,7 @@ pub fn capture_types(&self, db: &'db dyn HirDatabase) -> Vec<Type<'db>> {
             return Vec::new();
         };
         let owner = db.lookup_intern_closure(id).0;
-        let infer = db.infer(owner);
+        let infer = InferenceResult::for_body(db, owner);
         let (captures, _) = infer.closure_info(id);
         let env = db.trait_environment_for_body(owner);
         captures
@@ -4648,9 +4567,9 @@ pub fn fn_trait(&self, db: &dyn HirDatabase) -> FnTrait {
         match self.id {
             AnyClosureId::ClosureId(id) => {
                 let owner = db.lookup_intern_closure(id).0;
-                let infer = db.infer(owner);
+                let infer = InferenceResult::for_body(db, owner);
                 let info = infer.closure_info(id);
-                info.1
+                info.1.into()
             }
             AnyClosureId::CoroutineClosureId(_id) => {
                 // FIXME: Infer kind for coroutine closures.
@@ -4664,6 +4583,71 @@ pub fn fn_trait(&self, db: &dyn HirDatabase) -> FnTrait {
     }
 }
 
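+/// The `Fn*` family of traits, mirroring `hir_ty`'s internal `traits::FnTrait`.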
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum FnTrait {
+    FnOnce,
+    FnMut,
+    Fn,
+
+    AsyncFnOnce,
+    AsyncFnMut,
+    AsyncFn,
+}
+
+impl From<traits::FnTrait> for FnTrait {
+    fn from(value: traits::FnTrait) -> Self {
+        match value {
+            traits::FnTrait::FnOnce => FnTrait::FnOnce,
+            traits::FnTrait::FnMut => FnTrait::FnMut,
+            traits::FnTrait::Fn => FnTrait::Fn,
+            traits::FnTrait::AsyncFnOnce => FnTrait::AsyncFnOnce,
+            traits::FnTrait::AsyncFnMut => FnTrait::AsyncFnMut,
+            traits::FnTrait::AsyncFn => FnTrait::AsyncFn,
+        }
+    }
+}
+
+impl fmt::Display for FnTrait {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            FnTrait::FnOnce => write!(f, "FnOnce"),
+            FnTrait::FnMut => write!(f, "FnMut"),
+            FnTrait::Fn => write!(f, "Fn"),
+            FnTrait::AsyncFnOnce => write!(f, "AsyncFnOnce"),
+            FnTrait::AsyncFnMut => write!(f, "AsyncFnMut"),
+            FnTrait::AsyncFn => write!(f, "AsyncFn"),
+        }
+    }
+}
+
+impl FnTrait {
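+    /// The name of the method defined by this trait, e.g. `call` for `Fn`.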
+    pub const fn function_name(&self) -> &'static str {
+        match self {
+            FnTrait::FnOnce => "call_once",
+            FnTrait::FnMut => "call_mut",
+            FnTrait::Fn => "call",
+            FnTrait::AsyncFnOnce => "async_call_once",
+            FnTrait::AsyncFnMut => "async_call_mut",
+            FnTrait::AsyncFn => "async_call",
+        }
+    }
+
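+    /// The lang item corresponding to this trait.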
+    pub fn lang_item(self) -> LangItem {
+        match self {
+            FnTrait::FnOnce => LangItem::FnOnce,
+            FnTrait::FnMut => LangItem::FnMut,
+            FnTrait::Fn => LangItem::Fn,
+            FnTrait::AsyncFnOnce => LangItem::AsyncFnOnce,
+            FnTrait::AsyncFnMut => LangItem::AsyncFnMut,
+            FnTrait::AsyncFn => LangItem::AsyncFn,
+        }
+    }
+
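+    /// Resolves this trait through the given crate's lang items.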
+    pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<Trait> {
+        Trait::lang(db, krate, self.lang_item())
+    }
+}
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct ClosureCapture<'db> {
     owner: DefWithBodyId,
@@ -4821,7 +4805,7 @@ fn new(db: &'db dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty<'db>) ->
     }
 
     fn from_def(db: &'db dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Self {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let ty = db.ty(def.into());
         let def = match def.into() {
             TyDefId::AdtId(it) => GenericDefId::AdtId(it),
@@ -4844,7 +4828,7 @@ fn from_value_def(
         db: &'db dyn HirDatabase,
         def: impl Into<ValueTyDefId> + HasResolver,
     ) -> Self {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let Some(ty) = db.value_ty(def.into()) else {
             return Type::new(db, def, Ty::new_error(interner, ErrorGuaranteed));
         };
@@ -4900,7 +4884,7 @@ pub fn is_reference(&self) -> bool {
     }
 
     pub fn contains_reference(&self, db: &'db dyn HirDatabase) -> bool {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         return self.ty.visit_with(&mut Visitor { interner }).is_break();
 
         fn is_phantom_data(db: &dyn HirDatabase, adt_id: AdtId) -> bool {
@@ -5052,14 +5036,15 @@ pub fn is_unknown(&self) -> bool {
     /// `std::future::Future` and returns the `Output` associated type.
     /// This function is used in `.await` syntax completion.
     pub fn into_future_output(&self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
-        let trait_ = LangItem::IntoFutureIntoFuture
-            .resolve_function(db, self.env.krate)
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let trait_ = lang_items
+            .IntoFutureIntoFuture
             .and_then(|into_future_fn| {
                 let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?;
                 let into_future_trait = assoc_item.container_or_implemented_trait(db)?;
                 Some(into_future_trait.id)
             })
-            .or_else(|| LangItem::Future.resolve_trait(db, self.env.krate))?;
+            .or(lang_items.Future)?;
 
         if !traits::implements_trait_unique(self.ty, db, self.env.clone(), trait_) {
             return None;
@@ -5072,13 +5057,15 @@ pub fn into_future_output(&self, db: &'db dyn HirDatabase) -> Option<Type<'db>>
 
     /// This does **not** resolve `IntoFuture`, only `Future`.
     pub fn future_output(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
-        let future_output = LangItem::FutureOutput.resolve_type_alias(db, self.env.krate)?;
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let future_output = lang_items.FutureOutput?;
         self.normalize_trait_assoc_type(db, &[], future_output.into())
     }
 
     /// This does **not** resolve `IntoIterator`, only `Iterator`.
     pub fn iterator_item(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
-        let iterator_trait = LangItem::Iterator.resolve_trait(db, self.env.krate)?;
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let iterator_trait = lang_items.Iterator?;
         let iterator_item = iterator_trait
             .trait_items(db)
             .associated_type_by_name(&Name::new_symbol_root(sym::Item))?;
@@ -5086,7 +5073,8 @@ pub fn iterator_item(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
     }
 
     pub fn impls_iterator(self, db: &'db dyn HirDatabase) -> bool {
-        let Some(iterator_trait) = LangItem::Iterator.resolve_trait(db, self.env.krate) else {
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let Some(iterator_trait) = lang_items.Iterator else {
             return false;
         };
         traits::implements_trait_unique(self.ty, db, self.env.clone(), iterator_trait)
@@ -5094,13 +5082,12 @@ pub fn impls_iterator(self, db: &'db dyn HirDatabase) -> bool {
 
     /// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type
     pub fn into_iterator_iter(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
-        let trait_ = LangItem::IntoIterIntoIter.resolve_function(db, self.env.krate).and_then(
-            |into_iter_fn| {
-                let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
-                let into_iter_trait = assoc_item.container_or_implemented_trait(db)?;
-                Some(into_iter_trait.id)
-            },
-        )?;
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let trait_ = lang_items.IntoIterIntoIter.and_then(|into_iter_fn| {
+            let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
+            let into_iter_trait = assoc_item.container_or_implemented_trait(db)?;
+            Some(into_iter_trait.id)
+        })?;
 
         if !traits::implements_trait_unique(self.ty, db, self.env.clone(), trait_) {
             return None;
@@ -5117,7 +5104,8 @@ pub fn into_iterator_iter(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
     /// This function can be used to check if a particular type is callable, since FnOnce is a
     /// supertrait of Fn and FnMut, so all callable types implements at least FnOnce.
     pub fn impls_fnonce(&self, db: &'db dyn HirDatabase) -> bool {
-        let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let fnonce_trait = match lang_items.FnOnce {
             Some(it) => it,
             None => return false,
         };
@@ -5127,7 +5115,7 @@ pub fn impls_fnonce(&self, db: &'db dyn HirDatabase) -> bool {
 
     // FIXME: Find better API that also handles const generics
     pub fn impls_trait(&self, db: &'db dyn HirDatabase, trait_: Trait, args: &[Type<'db>]) -> bool {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let args = generic_args_from_tys(
             interner,
             trait_.id.into(),
@@ -5142,7 +5130,7 @@ pub fn normalize_trait_assoc_type(
         args: &[Type<'db>],
         alias: TypeAlias,
     ) -> Option<Type<'db>> {
-        let interner = DbInterner::new_with(db, Some(self.env.krate), self.env.block);
+        let interner = DbInterner::new_with(db, self.env.krate);
         let args = generic_args_from_tys(
             interner,
             alias.id.into(),
@@ -5161,14 +5149,15 @@ pub fn normalize_trait_assoc_type(
     }
 
     pub fn is_copy(&self, db: &'db dyn HirDatabase) -> bool {
-        let Some(copy_trait) = LangItem::Copy.resolve_trait(db, self.env.krate) else {
+        let lang_items = hir_def::lang_item::lang_items(db, self.env.krate);
+        let Some(copy_trait) = lang_items.Copy else {
             return false;
         };
         self.impls_trait(db, copy_trait.into(), &[])
     }
 
     pub fn as_callable(&self, db: &'db dyn HirDatabase) -> Option<Callable<'db>> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let callee = match self.ty.kind() {
             TyKind::Closure(id, subst) => Callee::Closure(id.0, subst),
             TyKind::CoroutineClosure(id, subst) => Callee::CoroutineClosure(id.0, subst),
@@ -5242,7 +5231,7 @@ pub fn contains_unknown(&self) -> bool {
     }
 
     pub fn fields(&self, db: &'db dyn HirDatabase) -> Vec<(Field, Self)> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let (variant_id, substs) = match self.ty.kind() {
             TyKind::Adt(adt_def, substs) => {
                 let id = match adt_def.def_id().0 {
@@ -5299,7 +5288,7 @@ pub fn autoderef(
     }
 
     fn autoderef_(&self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Ty<'db>> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         // There should be no inference vars in types passed here
         let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty);
         autoderef(db, self.env.clone(), canonical)
@@ -5335,7 +5324,7 @@ fn iterate_assoc_items_dyn(
             }
         };
 
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let Some(simplified_type) =
             fast_reject::simplify_type(interner, self.ty, fast_reject::TreatParams::AsRigid)
         else {
@@ -5484,7 +5473,7 @@ fn with_method_resolution<R>(
         f: impl FnOnce(&MethodResolutionContext<'_, 'db>) -> R,
     ) -> R {
         let module = resolver.module();
-        let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+        let interner = DbInterner::new_with(db, module.krate());
         let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
         let unstable_features =
             MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map());
@@ -5781,7 +5770,7 @@ fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
     /// Note that we consider placeholder types to unify with everything.
     /// For example `Option<T>` and `Option<U>` unify although there is unresolved goal `T = U`.
     pub fn could_unify_with(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> bool {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let tys = hir_ty::replace_errors_with_variables(interner, &(self.ty, other.ty));
         hir_ty::could_unify(db, self.env.clone(), &tys)
     }
@@ -5791,13 +5780,13 @@ pub fn could_unify_with(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> b
     /// This means that placeholder types are not considered to unify if there are any bounds set on
     /// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`
     pub fn could_unify_with_deeply(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> bool {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let tys = hir_ty::replace_errors_with_variables(interner, &(self.ty, other.ty));
         hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
     }
 
     pub fn could_coerce_to(&self, db: &'db dyn HirDatabase, to: &Type<'db>) -> bool {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let tys = hir_ty::replace_errors_with_variables(interner, &(self.ty, to.ty));
         hir_ty::could_coerce(db, self.env.clone(), &tys)
     }
@@ -5823,7 +5812,7 @@ pub fn layout(&self, db: &'db dyn HirDatabase) -> Result<Layout, LayoutError> {
     }
 
     pub fn drop_glue(&self, db: &'db dyn HirDatabase) -> DropGlue {
-        let interner = DbInterner::new_with(db, Some(self.env.krate), self.env.block);
+        let interner = DbInterner::new_with(db, self.env.krate);
         let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
         hir_ty::drop::has_drop_glue(&infcx, self.ty, self.env.clone())
     }
@@ -5913,7 +5902,7 @@ enum Callee<'db> {
     Closure(InternedClosureId, GenericArgs<'db>),
     CoroutineClosure(InternedCoroutineId, GenericArgs<'db>),
     FnPtr,
-    FnImpl(FnTrait),
+    FnImpl(traits::FnTrait),
 }
 
 pub enum CallableKind<'db> {
@@ -5940,7 +5929,7 @@ pub fn kind(&self) -> CallableKind<'db> {
                 CallableKind::Closure(Closure { id: AnyClosureId::CoroutineClosureId(id), subst })
             }
             Callee::FnPtr => CallableKind::FnPtr,
-            Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_),
+            Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_.into()),
         }
     }
     pub fn receiver_param(&self, db: &'db dyn HirDatabase) -> Option<(SelfParam, Type<'db>)> {
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 769cfd9..82e60bf 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -13,7 +13,7 @@
 use either::Either;
 use hir_def::{
     DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
-    expr_store::{Body, ExprOrPatSource, path::Path},
+    expr_store::{Body, ExprOrPatSource, HygieneId, path::Path},
     hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
     nameres::{ModuleOrigin, crate_def_map},
     resolver::{self, HasResolver, Resolver, TypeNs},
@@ -21,7 +21,6 @@
 };
 use hir_expand::{
     EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
-    attrs::collect_attrs,
     builtin::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     files::{FileRangeWrapper, HirFileRange, InRealFile},
@@ -29,6 +28,7 @@
     name::AsName,
 };
 use hir_ty::{
+    InferenceResult,
     diagnostics::{unsafe_operations, unsafe_operations_for_body},
     next_solver::DbInterner,
 };
@@ -36,7 +36,7 @@
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{SmallVec, smallvec};
-use span::{Edition, FileId, SyntaxContext};
+use span::{FileId, SyntaxContext};
 use stdx::{TupleExt, always};
 use syntax::{
     AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@@ -53,7 +53,7 @@
     TypeAlias, TypeParam, Union, Variant, VariantDef,
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
-    source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
+    source_analyzer::{SourceAnalyzer, resolve_hir_path},
 };
 
 const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
@@ -385,18 +385,18 @@ pub fn first_crate(&self, file: FileId) -> Option<Crate> {
         }
     }
 
-    pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
-        Some(EditionedFileId::new(
-            self.db,
-            file,
-            self.file_to_module_defs(file).next()?.krate().edition(self.db),
-        ))
+    pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
+        let krate = self.file_to_module_defs(file).next()?.krate();
+        Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
+    }
+
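+    /// Like [`Self::attach_first_edition_opt`], but falls back to the current edition
+    /// with a guessed origin when the file does not belong to any crate.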
+    pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
+        self.attach_first_edition_opt(file)
+            .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file))
     }
 
     pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
-        let file_id = self
-            .attach_first_edition(file_id)
-            .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
+        let file_id = self.attach_first_edition(file_id);
 
         let tree = self.db.parse(file_id).tree();
         self.cache(tree.syntax().clone(), file_id.into());
@@ -405,7 +405,7 @@ pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
 
     pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
         if let Some(editioned_file_id) = file_id.file_id() {
-            self.attach_first_edition(editioned_file_id.file_id(self.db))
+            self.attach_first_edition_opt(editioned_file_id.file_id(self.db))
                 .map_or(file_id, Into::into)
         } else {
             file_id
@@ -1197,33 +1197,34 @@ fn descend_into_macros_impl<T>(
                                     .zip(Some(item))
                             })
                             .map(|(call_id, item)| {
-                                let attr_id = match db.lookup_intern_macro_call(call_id).kind {
+                                let item_range = item.syntax().text_range();
+                                let loc = db.lookup_intern_macro_call(call_id);
+                                let text_range = match loc.kind {
                                     hir_expand::MacroCallKind::Attr {
-                                        invoc_attr_index, ..
-                                    } => invoc_attr_index.ast_index(),
-                                    _ => 0,
+                                        censored_attr_ids: attr_ids,
+                                        ..
+                                    } => {
+                                        // FIXME: the attribute's text range is used to strip away all
+                                        // entries from the start of the attribute "list" up to the invoking
+                                        // attribute. But in
+                                        // ```
+                                        // mod foo {
+                                        //     #![inner]
+                                        // }
+                                        // ```
+                                        // we don't want to strip away anything in the `mod foo {` range.
+                                        // That is, if the id corresponds to an inner attribute, we should strip
+                                        // the text ranges of all outer attributes and then those of the inner
+                                        // ones up to the invoking attribute, so everything in between is ignored.
+                                        // FIXME: Should cfg_attr be handled differently?
+                                        let (attr, _, _, _) = attr_ids
+                                            .invoc_attr()
+                                            .find_attr_range_with_source(db, loc.krate, &item);
+                                        let start = attr.syntax().text_range().start();
+                                        TextRange::new(start, item_range.end())
+                                    }
+                                    _ => item_range,
                                 };
-                                // FIXME: here, the attribute's text range is used to strip away all
-                                // entries from the start of the attribute "list" up the invoking
-                                // attribute. But in
-                                // ```
-                                // mod foo {
-                                //     #![inner]
-                                // }
-                                // ```
-                                // we don't wanna strip away stuff in the `mod foo {` range, that is
-                                // here if the id corresponds to an inner attribute we got strip all
-                                // text ranges of the outer ones, and then all of the inner ones up
-                                // to the invoking attribute so that the inbetween is ignored.
-                                let text_range = item.syntax().text_range();
-                                let start = collect_attrs(&item)
-                                    .nth(attr_id)
-                                    .map(|attr| match attr.1 {
-                                        Either::Left(it) => it.syntax().text_range().start(),
-                                        Either::Right(it) => it.syntax().text_range().start(),
-                                    })
-                                    .unwrap_or_else(|| text_range.start());
-                                let text_range = TextRange::new(start, text_range.end());
                                 filter_duplicates(tokens, text_range);
                                 process_expansion_for_token(ctx, &mut stack, call_id)
                             })
@@ -1473,6 +1474,14 @@ pub fn diagnostics_display_range(
         FileRangeWrapper { file_id: file_id.file_id(self.db), range }
     }
 
+    pub fn diagnostics_display_range_for_range(
+        &self,
+        src: InFile<TextRange>,
+    ) -> FileRangeWrapper<FileId> {
+        let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
+        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
+    }
+
     fn token_ancestors_with_macros(
         &self,
         token: SyntaxToken,
@@ -1655,7 +1664,7 @@ pub fn resolve_trait_impl_method(
         func: Function,
         subst: impl IntoIterator<Item = Type<'db>>,
     ) -> Option<Function> {
-        let interner = DbInterner::new_with(self.db, None, None);
+        let interner = DbInterner::new_no_crate(self.db);
         let mut subst = subst.into_iter();
         let substs =
             hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| {
@@ -1769,9 +1778,9 @@ pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32>
     pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
         let def = DefWithBodyId::from(def);
         let (body, source_map) = self.db.body_with_source_map(def);
-        let infer = self.db.infer(def);
+        let infer = InferenceResult::for_body(self.db, def);
         let mut res = FxHashSet::default();
-        unsafe_operations_for_body(self.db, &infer, def, &body, &mut |node| {
+        unsafe_operations_for_body(self.db, infer, def, &body, &mut |node| {
             if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                 res.insert(node);
             }
@@ -1785,12 +1794,12 @@ pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<Expr
         let Some(def) = self.body_for(block.syntax()) else { return Vec::new() };
         let def = def.into();
         let (body, source_map) = self.db.body_with_source_map(def);
-        let infer = self.db.infer(def);
+        let infer = InferenceResult::for_body(self.db, def);
         let Some(ExprOrPatId::ExprId(block)) = source_map.node_expr(block.as_ref()) else {
             return Vec::new();
         };
         let mut res = Vec::default();
-        unsafe_operations(self.db, &infer, def, &body, block, &mut |node, _| {
+        unsafe_operations(self.db, infer, def, &body, block, &mut |node, _| {
             if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                 res.push(node);
             }
@@ -2330,7 +2339,7 @@ pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution
             self.db,
             &self.resolver,
             &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
-            name_hygiene(self.db, InFile::new(self.file_id, ast_path.syntax())),
+            HygieneId::ROOT,
             None,
         )
     }
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
index 5019a59..165ac7e 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
@@ -5,7 +5,7 @@
 //! node for a *child*, and get its hir.
 
 use either::Either;
-use hir_expand::{HirFileId, attrs::collect_attrs};
+use hir_expand::HirFileId;
 use span::AstIdNode;
 use syntax::{AstPtr, ast};
 
@@ -94,6 +94,7 @@ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: Hi
 
 impl ChildBySource for ItemScope {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+        let krate = file_id.krate(db);
         self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
         self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
         self.extern_blocks().for_each(|extern_block| {
@@ -123,12 +124,10 @@ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: Hi
             |(ast_id, calls)| {
                 let adt = ast_id.to_node(db);
                 calls.for_each(|(attr_id, call_id, calls)| {
-                    if let Some((_, Either::Left(attr))) =
-                        collect_attrs(&adt).nth(attr_id.ast_index())
-                    {
-                        res[keys::DERIVE_MACRO_CALL]
-                            .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
-                    }
+                    // FIXME: Fix cfg_attr handling.
+                    let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt);
+                    res[keys::DERIVE_MACRO_CALL]
+                        .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
                 });
             },
         );
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index 858426c..8144b2f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -18,7 +18,7 @@
         scope::{ExprScopes, ScopeId},
     },
     hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
-    lang_item::LangItem,
+    lang_item::LangItems,
     nameres::MacroSubNs,
     resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
     type_ref::{Mutability, TypeRef, TypeRefId},
@@ -78,7 +78,7 @@ pub(crate) enum BodyOrSig<'db> {
         def: DefWithBodyId,
         body: Arc<Body>,
         source_map: Arc<BodySourceMap>,
-        infer: Option<Arc<InferenceResult<'db>>>,
+        infer: Option<&'db InferenceResult<'db>>,
     },
     // To be folded into body once it is considered one
     VariantFields {
@@ -101,7 +101,7 @@ pub(crate) fn new_for_body(
         node: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
     ) -> SourceAnalyzer<'db> {
-        Self::new_for_body_(db, def, node, offset, Some(db.infer(def)))
+        Self::new_for_body_(db, def, node, offset, Some(InferenceResult::for_body(db, def)))
     }
 
     pub(crate) fn new_for_body_no_infer(
@@ -118,7 +118,7 @@ pub(crate) fn new_for_body_(
         def: DefWithBodyId,
         node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
-        infer: Option<Arc<InferenceResult<'db>>>,
+        infer: Option<&'db InferenceResult<'db>>,
     ) -> SourceAnalyzer<'db> {
         let (body, source_map) = db.body_with_source_map(def);
         let scopes = db.expr_scopes(def);
@@ -267,7 +267,7 @@ pub(crate) fn type_of_type(
         db: &'db dyn HirDatabase,
         ty: &ast::Type,
     ) -> Option<Type<'db>> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
 
         let type_ref = self.type_id(ty)?;
 
@@ -410,7 +410,7 @@ pub(crate) fn resolve_method_call_as_callable(
     ) -> Option<Callable<'db>> {
         let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
         let (func, args) = self.infer()?.method_resolution(expr_id)?;
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let ty = db.value_ty(func.into())?.instantiate(interner, args);
         let ty = Type::new_with_resolver(db, &self.resolver, ty);
         let mut res = ty.as_callable(db)?;
@@ -589,10 +589,10 @@ pub(crate) fn resolve_await_to_poll(
             }
         }
 
-        let poll_fn = LangItem::FuturePoll.resolve_function(db, self.resolver.krate())?;
+        let poll_fn = self.lang_items(db).FuturePoll?;
         // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
         // doesn't have any generic parameters, so we skip building another subst for `poll()`.
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let substs = GenericArgs::new_from_iter(interner, [ty.into()]);
         Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs))
     }
@@ -607,15 +607,18 @@ pub(crate) fn resolve_prefix_expr(
                 // This can be either `Deref::deref` or `DerefMut::deref_mut`.
                 // Since deref kind is inferenced and stored in `InferenceResult.method_resolution`,
                 // use that result to find out which one it is.
-                let (deref_trait, deref) =
-                    self.lang_trait_fn(db, LangItem::Deref, &Name::new_symbol_root(sym::deref))?;
+                let (deref_trait, deref) = self.lang_trait_fn(
+                    db,
+                    self.lang_items(db).Deref,
+                    &Name::new_symbol_root(sym::deref),
+                )?;
                 self.infer()
                     .and_then(|infer| {
                         let expr = self.expr_id(prefix_expr.clone().into())?.as_expr()?;
                         let (func, _) = infer.method_resolution(expr)?;
                         let (deref_mut_trait, deref_mut) = self.lang_trait_fn(
                             db,
-                            LangItem::DerefMut,
+                            self.lang_items(db).DerefMut,
                             &Name::new_symbol_root(sym::deref_mut),
                         )?;
                         if func == deref_mut { Some((deref_mut_trait, deref_mut)) } else { None }
@@ -623,16 +626,16 @@ pub(crate) fn resolve_prefix_expr(
                     .unwrap_or((deref_trait, deref))
             }
             ast::UnaryOp::Not => {
-                self.lang_trait_fn(db, LangItem::Not, &Name::new_symbol_root(sym::not))?
+                self.lang_trait_fn(db, self.lang_items(db).Not, &Name::new_symbol_root(sym::not))?
             }
             ast::UnaryOp::Neg => {
-                self.lang_trait_fn(db, LangItem::Neg, &Name::new_symbol_root(sym::neg))?
+                self.lang_trait_fn(db, self.lang_items(db).Neg, &Name::new_symbol_root(sym::neg))?
             }
         };
 
         let ty = self.ty_of_expr(prefix_expr.expr()?)?;
 
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         // HACK: subst for all methods coincides with that for their trait because the methods
         // don't have any generic parameters, so we skip building another subst for the methods.
         let substs = GenericArgs::new_from_iter(interner, [ty.into()]);
@@ -649,7 +652,7 @@ pub(crate) fn resolve_index_expr(
         let index_ty = self.ty_of_expr(index_expr.index()?)?;
 
         let (_index_trait, index_fn) =
-            self.lang_trait_fn(db, LangItem::Index, &Name::new_symbol_root(sym::index))?;
+            self.lang_trait_fn(db, self.lang_items(db).Index, &Name::new_symbol_root(sym::index))?;
         let op_fn = self
             .infer()
             .and_then(|infer| {
@@ -657,7 +660,7 @@ pub(crate) fn resolve_index_expr(
                 let (func, _) = infer.method_resolution(expr)?;
                 let (_index_mut_trait, index_mut_fn) = self.lang_trait_fn(
                     db,
-                    LangItem::IndexMut,
+                    self.lang_items(db).IndexMut,
                     &Name::new_symbol_root(sym::index_mut),
                 )?;
                 if func == index_mut_fn { Some(index_mut_fn) } else { None }
@@ -665,7 +668,7 @@ pub(crate) fn resolve_index_expr(
             .unwrap_or(index_fn);
         // HACK: subst for all methods coincides with that for their trait because the methods
         // don't have any generic parameters, so we skip building another subst for the methods.
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let substs = GenericArgs::new_from_iter(interner, [base_ty.into(), index_ty.into()]);
         Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
     }
@@ -679,12 +682,13 @@ pub(crate) fn resolve_bin_expr(
         let lhs = self.ty_of_expr(binop_expr.lhs()?)?;
         let rhs = self.ty_of_expr(binop_expr.rhs()?)?;
 
-        let (_op_trait, op_fn) = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
-            self.lang_trait_fn(db, lang_item, &Name::new_symbol_root(name))
-        })?;
+        let (_op_trait, op_fn) =
+            lang_items_for_bin_op(self.lang_items(db), op).and_then(|(name, lang_item)| {
+                self.lang_trait_fn(db, lang_item, &Name::new_symbol_root(name))
+            })?;
         // HACK: subst for `index()` coincides with that for `Index` because `index()` itself
         // doesn't have any generic parameters, so we skip building another subst for `index()`.
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let substs = GenericArgs::new_from_iter(interner, [lhs.into(), rhs.into()]);
 
         Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
@@ -697,10 +701,10 @@ pub(crate) fn resolve_try_expr(
     ) -> Option<FunctionId> {
         let ty = self.ty_of_expr(try_expr.expr()?)?;
 
-        let op_fn = LangItem::TryTraitBranch.resolve_function(db, self.resolver.krate())?;
+        let op_fn = self.lang_items(db).TryTraitBranch?;
         // HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself
         // doesn't have any generic parameters, so we skip building another subst for `branch()`.
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let substs = GenericArgs::new_from_iter(interner, [ty.into()]);
 
         Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
@@ -714,7 +718,7 @@ pub(crate) fn resolve_record_field(
         let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
         let expr = ast::Expr::from(record_expr);
         let expr_id = self.store_sm()?.node_expr(InFile::new(self.file_id, &expr))?;
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
 
         let ast_name = field.field_name()?;
         let local_name = ast_name.as_name();
@@ -755,7 +759,7 @@ pub(crate) fn resolve_record_pat_field(
         db: &'db dyn HirDatabase,
         field: &ast::RecordPatField,
     ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let field_name = field.field_name()?.as_name();
         let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
         let pat_id = self.pat_id(&record_pat.into())?;
@@ -817,7 +821,7 @@ pub(crate) fn resolve_offset_of_field(
 
         let trait_env = container.env;
 
-        let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
+        let interner = DbInterner::new_with(db, trait_env.krate);
         let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
 
         let mut container = Either::Right(container.ty);
@@ -1273,7 +1277,7 @@ fn missing_fields(
         variant: VariantId,
         missing_fields: Vec<LocalFieldId>,
     ) -> Vec<(Field, Type<'db>)> {
-        let interner = DbInterner::new_with(db, None, None);
+        let interner = DbInterner::new_no_crate(db);
         let field_types = db.field_types(variant);
 
         missing_fields
@@ -1423,18 +1427,22 @@ fn resolve_impl_const_or_trait_def_with_subst(
             None => return (const_id, subs),
         };
         let env = db.trait_environment_for_body(owner);
-        let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+        let interner = DbInterner::new_with(db, env.krate);
         let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
         method_resolution::lookup_impl_const(&infcx, env, const_id, subs)
     }
 
+    fn lang_items<'a>(&self, db: &'a dyn HirDatabase) -> &'a LangItems {
+        hir_def::lang_item::lang_items(db, self.resolver.krate())
+    }
+
     fn lang_trait_fn(
         &self,
         db: &'db dyn HirDatabase,
-        lang_trait: LangItem,
+        lang_trait: Option<TraitId>,
         method_name: &Name,
     ) -> Option<(TraitId, FunctionId)> {
-        let trait_id = lang_trait.resolve_trait(db, self.resolver.krate())?;
+        let trait_id = lang_trait?;
         let fn_id = trait_id.trait_items(db).method_by_name(method_name)?;
         Some((trait_id, fn_id))
     }
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index bd4cff5..1530e69 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -416,12 +416,12 @@ fn push_decl<L>(
         let mut do_not_complete = Complete::Yes;
 
         if let Some(attrs) = def.attrs(self.db) {
-            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
             if let Some(trait_do_not_complete) = trait_do_not_complete {
                 do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete);
             }
 
-            for alias in attrs.doc_aliases() {
+            for alias in attrs.doc_aliases(self.db) {
                 self.symbols.insert(FileSymbol {
                     name: alias.clone(),
                     def,
@@ -465,9 +465,9 @@ fn push_module(&mut self, module_id: ModuleId, name: &Name) {
 
         let mut do_not_complete = Complete::Yes;
         if let Some(attrs) = def.attrs(self.db) {
-            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
 
-            for alias in attrs.doc_aliases() {
+            for alias in attrs.doc_aliases(self.db) {
                 self.symbols.insert(FileSymbol {
                     name: alias.clone(),
                     def,
diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs
index dddc035..979ec8c 100644
--- a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs
@@ -597,7 +597,7 @@ pub(super) fn famous_types<'a, 'lt, 'db, DB: HirDatabase>(
 ) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
-    let interner = DbInterner::new_with(db, None, None);
+    let interner = DbInterner::new_no_crate(db);
     let bool_ty = Ty::new_bool(interner);
     let unit_ty = Ty::new_unit(interner);
     [
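
`DbInterner::new_with(db, None, None)` becoming `new_no_crate(db)`, and `new_with(db, Some(krate), block)` becoming `new_with(db, krate)`, is the constructor split repeated throughout this diff. A rough sketch of the shape, with placeholder types rather than the real interner:

```rust
// Sketch: give the "no crate" case its own constructor instead of Option parameters.
#[derive(Clone, Copy, Debug)]
struct CrateId(u32);

struct DbInterner {
    krate: Option<CrateId>,
}

impl DbInterner {
    fn new_with(krate: CrateId) -> Self {
        DbInterner { krate: Some(krate) }
    }
    fn new_no_crate() -> Self {
        DbInterner { krate: None }
    }
}

fn main() {
    let crate_local = DbInterner::new_with(CrateId(0));
    let crate_agnostic = DbInterner::new_no_crate();
    assert!(crate_local.krate.is_some() && crate_agnostic.krate.is_none());
}
```
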
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 3eeff2a..248ce2a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,7 +1,7 @@
 use std::iter::{self, Peekable};
 
 use either::Either;
-use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym};
+use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics};
 use ide_db::RootDatabase;
 use ide_db::assists::ExprFillDefaultMode;
 use ide_db::syntax_helpers::suggest_name;
@@ -401,7 +401,7 @@ impl ExtendedVariant {
     fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
         match self {
             ExtendedVariant::Variant { variant: var, .. } => {
-                var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
+                var.attrs(db).is_doc_hidden() && var.module(db).krate() != krate
             }
             _ => false,
         }
@@ -424,7 +424,7 @@ fn enum_(
     fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool {
         match self {
             ExtendedEnum::Enum { enum_: e, .. } => {
-                e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
+                e.attrs(db).is_non_exhaustive() && e.module(db).krate() != krate
             }
             _ => false,
         }
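
Several assists in this diff switch from probing raw attribute keys (`by_key(sym::non_exhaustive).exists()`, `has_doc_hidden()`) to named boolean helpers (`is_non_exhaustive`, `is_doc_hidden`, later also `is_deprecated`, `is_test`, `has_path`). A minimal sketch of that refactor, using illustrative names rather than the real `hir` attribute API:

```rust
// Sketch only: wrap ad-hoc key probes in helpers that read as intent.
struct Attrs {
    keys: Vec<&'static str>,
}

impl Attrs {
    fn by_key(&self, key: &str) -> bool {
        self.keys.iter().any(|k| *k == key)
    }
    // These mirror the helpers used at the call sites above.
    fn is_non_exhaustive(&self) -> bool {
        self.by_key("non_exhaustive")
    }
    fn is_doc_hidden(&self) -> bool {
        self.by_key("doc_hidden")
    }
}

fn main() {
    let attrs = Attrs { keys: vec!["non_exhaustive"] };
    assert!(attrs.is_non_exhaustive());
    assert!(!attrs.is_doc_hidden());
}
```
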
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 61d8449..f8b9bb6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -1197,4 +1197,57 @@ fn foo() {
 "#,
         );
     }
+
+    #[test]
+    fn regression_issue_21020() {
+        check_assist(
+            convert_tuple_struct_to_named_struct,
+            r#"
+pub struct S$0(pub ());
+
+trait T {
+    fn id(&self) -> usize;
+}
+
+trait T2 {
+    fn foo(&self) -> usize;
+}
+
+impl T for S {
+    fn id(&self) -> usize {
+        self.0.len()
+    }
+}
+
+impl T2 for S {
+    fn foo(&self) -> usize {
+        self.0.len()
+    }
+}
+            "#,
+            r#"
+pub struct S { pub field1: () }
+
+trait T {
+    fn id(&self) -> usize;
+}
+
+trait T2 {
+    fn foo(&self) -> usize;
+}
+
+impl T for S {
+    fn id(&self) -> usize {
+        self.field1.len()
+    }
+}
+
+impl T2 for S {
+    fn foo(&self) -> usize {
+        self.field1.len()
+    }
+}
+            "#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index 8b24d33..46f2108 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -1,4 +1,4 @@
-use hir::{HasVisibility, sym};
+use hir::HasVisibility;
 use ide_db::{
     FxHashMap, FxHashSet,
     assists::AssistId,
@@ -93,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
     let kind = struct_type.kind(ctx.db());
     let struct_def_path = module.find_path(ctx.db(), struct_def, cfg)?;
 
-    let is_non_exhaustive = struct_def.attrs(ctx.db())?.by_key(sym::non_exhaustive).exists();
+    let is_non_exhaustive = struct_def.attrs(ctx.db())?.is_non_exhaustive();
     let is_foreign_crate = struct_def.module(ctx.db()).is_some_and(|m| m.krate() != module.krate());
 
     let fields = struct_type.fields(ctx.db());
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
index 6a86823..199c85d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -123,7 +123,7 @@ fn existing_from_impl(
     let variant = sema.to_def(variant)?;
     let krate = variant.module(db).krate();
     let from_trait = FamousDefs(sema, krate).core_convert_From()?;
-    let interner = DbInterner::new_with(db, Some(krate.base()), None);
+    let interner = DbInterner::new_with(db, krate.base());
     use hir::next_solver::infer::DbInternerInferExt;
     let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
index a1ec763..bdb42f9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
@@ -220,7 +220,7 @@ fn from_impl_exists(
     let strukt = sema.to_def(strukt)?;
     let krate = strukt.krate(db);
     let from_trait = FamousDefs(sema, krate).core_convert_From()?;
-    let interner = DbInterner::new_with(db, Some(krate.base()), None);
+    let interner = DbInterner::new_with(db, krate.base());
     use hir::next_solver::infer::DbInternerInferExt;
     let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
index da62b81..503003b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs
@@ -1,7 +1,6 @@
 use std::iter;
 
 use ast::edit::IndentLevel;
-use hir::{HasAttrs, sym};
 use ide_db::base_db::AnchoredPathBuf;
 use itertools::Itertools;
 use stdx::format_to;
@@ -53,14 +52,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
                 let mut buf = String::from("./");
                 let db = ctx.db();
                 match parent_module.name(db) {
-                    Some(name)
-                        if !parent_module.is_mod_rs(db)
-                            && parent_module
-                                .attrs(db)
-                                .by_key(sym::path)
-                                .string_value_unescape()
-                                .is_none() =>
-                    {
+                    Some(name) if !parent_module.is_mod_rs(db) && !parent_module.has_path(db) => {
                         format_to!(buf, "{}/", name.as_str())
                     }
                     _ => (),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index ca46890..0b5dae6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -67,8 +67,8 @@
 pub mod utils;
 
 use hir::Semantics;
-use ide_db::{EditionedFileId, RootDatabase};
-use syntax::{Edition, TextRange};
+use ide_db::RootDatabase;
+use syntax::TextRange;
 
 pub(crate) use crate::assist_context::{AssistContext, Assists};
 
@@ -88,9 +88,7 @@ pub fn assists(
     range: ide_db::FileRange,
 ) -> Vec<Assist> {
     let sema = Semantics::new(db);
-    let file_id = sema
-        .attach_first_edition(range.file_id)
-        .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
+    let file_id = sema.attach_first_edition(range.file_id);
     let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
     let mut acc = Assists::new(&ctx, resolve);
     handlers::all().iter().for_each(|handler| {
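
In this hunk `attach_first_edition` is used without the `unwrap_or_else(.., Edition::CURRENT)` fallback, i.e. the default edition is now applied inside the callee and the function returns a value rather than an `Option`. A small stand-in model of that API move (types and fields here are placeholders, not the real `ide_db` ones):

```rust
// Sketch: move the fallback into the callee so callers stop defaulting an Option.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Edition {
    Current,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct EditionedFileId {
    file_id: u32,
    edition: Edition,
}

struct Semantics {
    known_edition: Option<Edition>,
}

impl Semantics {
    // Before: `-> Option<EditionedFileId>`, with every caller supplying Edition::CURRENT.
    fn attach_first_edition(&self, file_id: u32) -> EditionedFileId {
        EditionedFileId { file_id, edition: self.known_edition.unwrap_or(Edition::Current) }
    }
}

fn main() {
    let sema = Semantics { known_edition: None };
    assert_eq!(sema.attach_first_edition(7).edition, Edition::Current);
}
```
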
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index ade6069..c9044fe 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -321,11 +321,14 @@ fn check_with_config(
     let _tracing = setup_tracing();
     let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
     db.enable_proc_attr_macros();
+    let sema = Semantics::new(&db);
+    let file_with_caret_id = sema
+        .attach_first_edition_opt(file_with_caret_id.file_id(&db))
+        .unwrap_or(file_with_caret_id);
     let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string();
 
     let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
 
-    let sema = Semantics::new(&db);
     let ctx = AssistContext::new(sema, &config, frange);
     let resolve = match expected {
         ExpectedResult::Unresolved => AssistResolveStrategy::None,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index a00af92..de8c4b6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -101,16 +101,7 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
 }
 
 pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool {
-    attrs.iter().any(|attr| {
-        let path = attr.path();
-        (|| {
-            Some(
-                path.segments().first()?.as_str().starts_with("test")
-                    || path.segments().last()?.as_str().ends_with("test"),
-            )
-        })()
-        .unwrap_or_default()
-    })
+    attrs.is_test()
 }
 
 #[derive(Clone, Copy, PartialEq)]
@@ -136,7 +127,7 @@ pub fn filter_assoc_items(
         .copied()
         .filter(|assoc_item| {
             if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
-                && assoc_item.attrs(sema.db).has_doc_hidden()
+                && assoc_item.attrs(sema.db).is_doc_hidden()
             {
                 if let hir::AssocItem::Function(f) = assoc_item
                     && !f.has_body(sema.db)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
index c87c46d..df577b8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
@@ -56,7 +56,7 @@ pub(super) fn complete_lint(
         };
         let mut item =
             CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition);
-        item.documentation(Documentation::new(description.to_owned()));
+        item.documentation(Documentation::new_owned(description.to_owned()));
         item.add_to(acc, ctx.db)
     }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index d1e05a4..20d0148 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -266,7 +266,7 @@ fn import_on_the_fly(
             let original_item = &import.original_item;
             !ctx.is_item_hidden(&import.item_to_import)
                 && !ctx.is_item_hidden(original_item)
-                && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
+                && ctx.check_stability(original_item.attrs(ctx.db).as_ref())
         })
         .filter(|import| filter_excluded_flyimport(ctx, import))
         .sorted_by(|a, b| {
@@ -313,7 +313,7 @@ fn import_on_the_fly_pat_(
             let original_item = &import.original_item;
             !ctx.is_item_hidden(&import.item_to_import)
                 && !ctx.is_item_hidden(original_item)
-                && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
+                && ctx.check_stability(original_item.attrs(ctx.db).as_ref())
         })
         .sorted_by(|a, b| {
             let key = |import_path| {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
index 6c001bd..39048e4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs
@@ -87,6 +87,9 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
     let in_block = kind.is_none();
 
     let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
+    let no_abi_qualifiers = ctx.qualifier_ctx.abi_node.is_none();
+    let has_extern_kw =
+        ctx.qualifier_ctx.abi_node.as_ref().is_some_and(|it| it.string_token().is_none());
     let has_unsafe_kw = ctx.qualifier_ctx.unsafe_tok.is_some();
     let has_async_kw = ctx.qualifier_ctx.async_tok.is_some();
     let has_safe_kw = ctx.qualifier_ctx.safe_tok.is_some();
@@ -118,7 +121,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
             }
         }
 
-        if !has_async_kw && no_vis_qualifiers && in_item_list {
+        if !has_async_kw && no_vis_qualifiers && no_abi_qualifiers && in_item_list {
             add_keyword("extern", "extern $0");
         }
 
@@ -159,11 +162,14 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
         add_keyword("static", "static $1: $2;");
     } else {
         if !in_inherent_impl {
-            if !in_trait {
+            if !in_trait && no_abi_qualifiers {
                 add_keyword("extern", "extern $0");
             }
             add_keyword("type", "type $0");
         }
+        if has_extern_kw {
+            add_keyword("crate", "crate $0;");
+        }
 
         add_keyword("fn", "fn $1($2) {\n    $0\n}");
         add_keyword("unsafe", "unsafe $0");
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index ba1fe64..4dd84da 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -446,7 +446,7 @@ fn add_custom_postfix_completions(
             let body = snippet.postfix_snippet(receiver_text);
             let mut builder =
                 postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
-            builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
+            builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
             for import in imports.into_iter() {
                 builder.add_import(import);
             }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
index ead9852..04450ae 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
@@ -139,7 +139,7 @@ fn add_custom_completions(
             };
             let body = snip.snippet();
             let mut builder = snippet(ctx, cap, trigger, &body);
-            builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
+            builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
             for import in imports.into_iter() {
                 builder.add_import(import);
             }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index 31a9a74..ffffc26 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -53,6 +53,7 @@ pub(crate) struct QualifierCtx {
     pub(crate) unsafe_tok: Option<SyntaxToken>,
     pub(crate) safe_tok: Option<SyntaxToken>,
     pub(crate) vis_node: Option<ast::Visibility>,
+    pub(crate) abi_node: Option<ast::Abi>,
 }
 
 impl QualifierCtx {
@@ -61,6 +62,7 @@ pub(crate) fn none(&self) -> bool {
             && self.unsafe_tok.is_none()
             && self.safe_tok.is_none()
             && self.vis_node.is_none()
+            && self.abi_node.is_none()
     }
 }
 
@@ -557,7 +559,7 @@ pub(crate) fn doc_aliases<I>(&self, item: &I) -> Vec<SmolStr>
         I: hir::HasAttrs + Copy,
     {
         let attrs = item.attrs(self.db);
-        attrs.doc_aliases().map(|it| it.as_str().into()).collect()
+        attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
     }
 
     /// Check if an item is `#[doc(hidden)]`.
@@ -571,7 +573,7 @@ pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool {
     }
 
     /// Checks whether this item should be listed in regards to stability. Returns `true` if we should.
-    pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
+    pub(crate) fn check_stability(&self, attrs: Option<&hir::AttrsWithOwner>) -> bool {
         let Some(attrs) = attrs else {
             return true;
         };
@@ -589,15 +591,15 @@ pub(crate) fn check_stability_and_hidden<I>(&self, item: I) -> bool
 
     /// Whether the given trait is an operator trait or not.
     pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
-        match trait_.attrs(self.db).lang() {
-            Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
+        match trait_.attrs(self.db).lang(self.db) {
+            Some(lang) => OP_TRAIT_LANG.contains(&lang),
             None => false,
         }
     }
 
     /// Whether the given trait has `#[doc(notable_trait)]`
     pub(crate) fn is_doc_notable_trait(&self, trait_: hir::Trait) -> bool {
-        trait_.attrs(self.db).has_doc_notable_trait()
+        trait_.attrs(self.db).is_doc_notable_trait()
     }
 
     /// Returns the traits in scope, with the [`Drop`] trait removed.
@@ -654,7 +656,7 @@ fn is_scope_def_hidden(&self, scope_def: ScopeDef) -> bool {
     fn is_visible_impl(
         &self,
         vis: &hir::Visibility,
-        attrs: &hir::Attrs,
+        attrs: &hir::AttrsWithOwner,
         defining_crate: hir::Crate,
     ) -> Visible {
         if !self.check_stability(Some(attrs)) {
@@ -676,14 +678,18 @@ fn is_visible_impl(
         if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes }
     }
 
-    pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
+    pub(crate) fn is_doc_hidden(
+        &self,
+        attrs: &hir::AttrsWithOwner,
+        defining_crate: hir::Crate,
+    ) -> bool {
         // `doc(hidden)` items are only completed within the defining crate.
-        self.krate != defining_crate && attrs.has_doc_hidden()
+        self.krate != defining_crate && attrs.is_doc_hidden()
     }
 
     pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec<SmolStr> {
         if let Some(attrs) = scope_def.attrs(self.db) {
-            attrs.doc_aliases().map(|it| it.as_str().into()).collect()
+            attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
         } else {
             vec![]
         }
@@ -701,7 +707,7 @@ pub(crate) fn new(
         let _p = tracing::info_span!("CompletionContext::new").entered();
         let sema = Semantics::new(db);
 
-        let editioned_file_id = sema.attach_first_edition(file_id)?;
+        let editioned_file_id = sema.attach_first_edition(file_id);
         let original_file = sema.parse(editioned_file_id);
 
         // Insert a fake ident to get a valid parse tree. We will use this file
@@ -887,35 +893,35 @@ pub(crate) fn new(
     }
 }
 
-const OP_TRAIT_LANG_NAMES: &[&str] = &[
-    "add_assign",
-    "add",
-    "bitand_assign",
-    "bitand",
-    "bitor_assign",
-    "bitor",
-    "bitxor_assign",
-    "bitxor",
-    "deref_mut",
-    "deref",
-    "div_assign",
-    "div",
-    "eq",
-    "fn_mut",
-    "fn_once",
-    "fn",
-    "index_mut",
-    "index",
-    "mul_assign",
-    "mul",
-    "neg",
-    "not",
-    "partial_ord",
-    "rem_assign",
-    "rem",
-    "shl_assign",
-    "shl",
-    "shr_assign",
-    "shr",
-    "sub",
+const OP_TRAIT_LANG: &[hir::LangItem] = &[
+    hir::LangItem::AddAssign,
+    hir::LangItem::Add,
+    hir::LangItem::BitAndAssign,
+    hir::LangItem::BitAnd,
+    hir::LangItem::BitOrAssign,
+    hir::LangItem::BitOr,
+    hir::LangItem::BitXorAssign,
+    hir::LangItem::BitXor,
+    hir::LangItem::DerefMut,
+    hir::LangItem::Deref,
+    hir::LangItem::DivAssign,
+    hir::LangItem::Div,
+    hir::LangItem::PartialEq,
+    hir::LangItem::FnMut,
+    hir::LangItem::FnOnce,
+    hir::LangItem::Fn,
+    hir::LangItem::IndexMut,
+    hir::LangItem::Index,
+    hir::LangItem::MulAssign,
+    hir::LangItem::Mul,
+    hir::LangItem::Neg,
+    hir::LangItem::Not,
+    hir::LangItem::PartialOrd,
+    hir::LangItem::RemAssign,
+    hir::LangItem::Rem,
+    hir::LangItem::ShlAssign,
+    hir::LangItem::Shl,
+    hir::LangItem::ShrAssign,
+    hir::LangItem::Shr,
+    hir::LangItem::Sub,
 ];
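
Replacing the string table with `hir::LangItem` constants trades a stringly-typed list for one the compiler checks: a typo fails to compile instead of silently never matching. A tiny illustration of the same membership test with an enum (abbreviated item set, not the full list above):

```rust
// Sketch: enum-based operator-trait check instead of comparing lang-name strings.
#[derive(PartialEq, Eq, Clone, Copy)]
enum LangItem {
    Add,
    Sub,
    Neg,
    Not,
    PartialOrd,
}

const OP_TRAIT_LANG: &[LangItem] =
    &[LangItem::Add, LangItem::Sub, LangItem::Neg, LangItem::Not, LangItem::PartialOrd];

fn is_ops_trait(lang: Option<LangItem>) -> bool {
    matches!(lang, Some(l) if OP_TRAIT_LANG.contains(&l))
}

fn main() {
    assert!(is_ops_trait(Some(LangItem::Neg)));
    assert!(!is_ops_trait(None));
}
```
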
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index 6758e80..e761da7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -1628,6 +1628,7 @@ fn classify_name_ref<'db>(
                     }
                 }
                 qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
+                qualifier_ctx.abi_node = error_node.children().find_map(ast::Abi::cast);
             }
 
             if let PathKind::Item { .. } = path_ctx.kind
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index 303c712..c526c7f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -57,7 +57,8 @@ pub struct CompletionItem {
 
     /// Additional info to show in the UI pop up.
     pub detail: Option<String>,
-    pub documentation: Option<Documentation>,
+    // FIXME: Make this use the `'db` lifetime.
+    pub documentation: Option<Documentation<'static>>,
 
     /// Whether this item is marked as deprecated
     pub deprecated: bool,
@@ -488,7 +489,8 @@ pub(crate) struct Builder {
     insert_text: Option<String>,
     is_snippet: bool,
     detail: Option<String>,
-    documentation: Option<Documentation>,
+    // FIXME: Make this use the `'db` lifetime.
+    documentation: Option<Documentation<'static>>,
     lookup: Option<SmolStr>,
     kind: CompletionItemKind,
     text_edit: Option<TextEdit>,
@@ -644,11 +646,11 @@ pub(crate) fn set_detail(&mut self, detail: Option<impl Into<String>>) -> &mut B
         self
     }
     #[allow(unused)]
-    pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder {
+    pub(crate) fn documentation(&mut self, docs: Documentation<'_>) -> &mut Builder {
         self.set_documentation(Some(docs))
     }
-    pub(crate) fn set_documentation(&mut self, docs: Option<Documentation>) -> &mut Builder {
-        self.documentation = docs;
+    pub(crate) fn set_documentation(&mut self, docs: Option<Documentation<'_>>) -> &mut Builder {
+        self.documentation = docs.map(Documentation::into_owned);
         self
     }
     pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index 8a0aaf3..f7a118d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -274,7 +274,7 @@ pub fn resolve_completion_edits(
     let _p = tracing::info_span!("resolve_completion_edits").entered();
     let sema = hir::Semantics::new(db);
 
-    let editioned_file_id = sema.attach_first_edition(file_id)?;
+    let editioned_file_id = sema.attach_first_edition(file_id);
 
     let original_file = sema.parse(editioned_file_id);
     let original_token =
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index c0f09e1..bc5589a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@
 pub(crate) mod union_literal;
 pub(crate) mod variant;
 
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
     RootDatabase, SnippetCap, SymbolKind,
@@ -91,8 +91,7 @@ fn is_immediately_after_macro_bang(&self) -> bool {
     }
 
     fn is_deprecated(&self, def: impl HasAttrs) -> bool {
-        let attrs = def.attrs(self.db());
-        attrs.by_key(sym::deprecated).exists()
+        def.attrs(self.db()).is_deprecated()
     }
 
     fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
@@ -115,7 +114,7 @@ fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
     }
 
     // FIXME: remove this
-    fn docs(&self, def: impl HasDocs) -> Option<Documentation> {
+    fn docs(&self, def: impl HasDocs) -> Option<Documentation<'a>> {
         def.docs(self.db())
     }
 }
@@ -320,7 +319,9 @@ pub(crate) fn render_expr(
     );
     let edit = TextEdit::replace(source_range, snippet);
     item.snippet_edit(ctx.config.snippet_cap?, edit);
-    item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
+    item.documentation(Documentation::new_owned(String::from(
+        "Autogenerated expression by term search",
+    )));
     item.set_relevance(crate::CompletionRelevance {
         type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
         ..Default::default()
@@ -553,7 +554,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
     }
 }
 
-fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation> {
+fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation<'_>> {
     use hir::ModuleDef::*;
     match resolution {
         ScopeDef::ModuleDef(Module(it)) => it.docs(db),
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
index 6c89e49..8b14f05 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
@@ -180,7 +180,7 @@ fn symbol_kind(self) -> SymbolKind {
         }
     }
 
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
         match self {
             Variant::Struct(it) => it.docs(db),
             Variant::EnumVariant(it) => it.docs(db),
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
index 312d3bd..60474a3 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -108,7 +108,7 @@ fn build_completion(
     label: SmolStr,
     lookup: SmolStr,
     pat: String,
-    def: impl HasDocs + Copy,
+    def: impl HasDocs,
     adt_ty: hir::Type<'_>,
     // Missing in context of match statement completions
     is_variant_missing: bool,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
index 37d0fa1..cfd6340 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
@@ -1,7 +1,7 @@
 //! Code common to structs, unions, and enum variants.
 
 use crate::context::CompletionContext;
-use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym};
+use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
 use ide_db::SnippetCap;
 use itertools::Itertools;
 use syntax::SmolStr;
@@ -105,8 +105,8 @@ pub(crate) fn visible_fields(
         .copied()
         .collect::<Vec<_>>();
     let has_invisible_field = n_fields - fields.len() > 0;
-    let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists()
-        && item.krate(ctx.db) != module.krate();
+    let is_foreign_non_exhaustive =
+        item.attrs(ctx.db).is_non_exhaustive() && item.krate(ctx.db) != module.krate();
     let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
     Some((fields, fields_omitted))
 }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 83606d2..cb1adfc 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -160,12 +160,12 @@ pub(crate) fn position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (RootDatabase, FilePosition) {
     let mut database = RootDatabase::default();
-    let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     database.enable_proc_attr_macros();
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
-    let position = FilePosition { file_id: file_id.file_id(&database), offset };
+    let position = FilePosition { file_id: file_id.file_id(), offset };
     (database, position)
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
index e139a5e..94530bf 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -780,9 +780,9 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ct SPECIAL_CONST (use dep::test_mod::TestTrait)           u8 DEPRECATED
-            fn weird_function() (use dep::test_mod::TestTrait)      fn() DEPRECATED
             me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
+            fn weird_function() (use dep::test_mod::TestTrait)      fn() DEPRECATED
+            ct SPECIAL_CONST (use dep::test_mod::TestTrait)           u8 DEPRECATED
         "#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
index c031856..0b2be02 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
@@ -211,6 +211,105 @@ fn after_visibility_unsafe() {
 }
 
 #[test]
+fn after_abi() {
+    check_with_base_items(
+        r#"extern "C" $0"#,
+        expect![[r#"
+            kw async
+            kw const
+            kw enum
+            kw fn
+            kw impl
+            kw impl for
+            kw mod
+            kw pub
+            kw pub(crate)
+            kw pub(super)
+            kw static
+            kw struct
+            kw trait
+            kw type
+            kw union
+            kw unsafe
+            kw use
+        "#]],
+    );
+    check_with_base_items(
+        r#"extern "C" f$0"#,
+        expect![[r#"
+            kw async
+            kw const
+            kw enum
+            kw fn
+            kw impl
+            kw impl for
+            kw mod
+            kw pub
+            kw pub(crate)
+            kw pub(super)
+            kw static
+            kw struct
+            kw trait
+            kw type
+            kw union
+            kw unsafe
+            kw use
+        "#]],
+    );
+}
+
+#[test]
+fn after_extern_token() {
+    check_with_base_items(
+        r#"extern $0"#,
+        expect![[r#"
+            kw async
+            kw const
+            kw crate
+            kw enum
+            kw fn
+            kw impl
+            kw impl for
+            kw mod
+            kw pub
+            kw pub(crate)
+            kw pub(super)
+            kw static
+            kw struct
+            kw trait
+            kw type
+            kw union
+            kw unsafe
+            kw use
+        "#]],
+    );
+    check_with_base_items(
+        r#"extern cr$0"#,
+        expect![[r#"
+            kw async
+            kw const
+            kw crate
+            kw enum
+            kw fn
+            kw impl
+            kw impl for
+            kw mod
+            kw pub
+            kw pub(crate)
+            kw pub(super)
+            kw static
+            kw struct
+            kw trait
+            kw type
+            kw union
+            kw unsafe
+            kw use
+        "#]],
+    );
+    check_edit("crate", "extern $0", "extern crate $0;");
+}
+
+#[test]
 fn in_impl_assoc_item_list() {
     check_with_base_items(
         r#"impl Struct { $0 }"#,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index c051fd8..9ce85b2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -5,8 +5,10 @@
 
 // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
 
+use std::borrow::Cow;
+
 use crate::RootDatabase;
-use crate::documentation::{DocsRangeMap, Documentation, HasDocs};
+use crate::documentation::{Documentation, HasDocs};
 use crate::famous_defs::FamousDefs;
 use arrayvec::ArrayVec;
 use either::Either;
@@ -21,7 +23,7 @@
 use span::Edition;
 use stdx::{format_to, impl_from};
 use syntax::{
-    SyntaxKind, SyntaxNode, SyntaxToken, TextSize,
+    SyntaxKind, SyntaxNode, SyntaxToken,
     ast::{self, AstNode},
     match_ast,
 };
@@ -199,21 +201,25 @@ pub fn name(&self, db: &RootDatabase) -> Option<Name> {
         Some(name)
     }
 
-    pub fn docs(
+    pub fn docs<'db>(
         &self,
-        db: &RootDatabase,
+        db: &'db RootDatabase,
         famous_defs: Option<&FamousDefs<'_, '_>>,
         display_target: DisplayTarget,
-    ) -> Option<Documentation> {
-        self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs)
+    ) -> Option<Documentation<'db>> {
+        self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs {
+            Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()),
+            Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()),
+            Either::Right(docs) => docs,
+        })
     }
 
-    pub fn docs_with_rangemap(
+    pub fn docs_with_rangemap<'db>(
         &self,
-        db: &RootDatabase,
+        db: &'db RootDatabase,
         famous_defs: Option<&FamousDefs<'_, '_>>,
         display_target: DisplayTarget,
-    ) -> Option<(Documentation, Option<DocsRangeMap>)> {
+    ) -> Option<Either<Cow<'db, hir::Docs>, Documentation<'db>>> {
         let docs = match self {
             Definition::Macro(it) => it.docs_with_rangemap(db),
             Definition::Field(it) => it.docs_with_rangemap(db),
@@ -229,15 +235,13 @@ pub fn docs_with_rangemap(
                 it.docs_with_rangemap(db).or_else(|| {
                     // docs are missing, try to fall back to the docs of the aliased item.
                     let adt = it.ty(db).as_adt()?;
-                    let (docs, range_map) = adt.docs_with_rangemap(db)?;
+                    let mut docs = adt.docs_with_rangemap(db)?.into_owned();
                     let header_docs = format!(
                         "*This is the documentation for* `{}`\n\n",
                         adt.display(db, display_target)
                     );
-                    let offset = TextSize::new(header_docs.len() as u32);
-                    let range_map = range_map.shift_docstring_line_range(offset);
-                    let docs = header_docs + docs.as_str();
-                    Some((Documentation::new(docs), range_map))
+                    docs.prepend_str(&header_docs);
+                    Some(Cow::Owned(docs))
                 })
             }
             Definition::BuiltinType(it) => {
@@ -246,7 +250,7 @@ pub fn docs_with_rangemap(
                     let primitive_mod =
                         format!("prim_{}", it.name().display(fd.0.db, display_target.edition));
                     let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?;
-                    doc_owner.docs_with_rangemap(fd.0.db)
+                    doc_owner.docs_with_rangemap(db)
                 })
             }
             Definition::BuiltinLifetime(StaticLifetime) => None,
@@ -282,7 +286,7 @@ pub fn docs_with_rangemap(
                     );
                 }
 
-                return Some((Documentation::new(docs.replace('*', "\\*")), None));
+                return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*"))));
             }
             Definition::ToolModule(_) => None,
             Definition::DeriveHelper(_) => None,
@@ -299,7 +303,7 @@ pub fn docs_with_rangemap(
             let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
             item.docs_with_rangemap(db)
         })
-        .map(|(docs, range_map)| (docs, Some(range_map)))
+        .map(Either::Left)
     }
 
     pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String {
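
The `new_borrowed`/`new_owned`/`into_owned` calls in this hunk rely on the `Cow`-based `Documentation<'db>` introduced in the next file: doc strings coming straight from the database stay borrowed, and an owned copy is only made when text is synthesized or needs to be stored as `'static` (e.g. in completion items). A self-contained sketch of that type and its use, mirroring the diff; the `main` usage is illustrative only:

```rust
use std::borrow::Cow;

// Sketch of the reworked type: borrowed when possible, owned when necessary.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Documentation<'db>(Cow<'db, str>);

impl<'db> Documentation<'db> {
    pub fn new_owned(s: String) -> Self {
        Documentation(Cow::Owned(s))
    }
    pub fn new_borrowed(s: &'db str) -> Self {
        Documentation(Cow::Borrowed(s))
    }
    pub fn into_owned(self) -> Documentation<'static> {
        Documentation(Cow::Owned(self.0.into_owned()))
    }
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

fn main() {
    let stored = "Does a thing.".to_owned();
    let borrowed = Documentation::new_borrowed(&stored); // no allocation
    let forever: Documentation<'static> = borrowed.clone().into_owned(); // for long-lived storage
    assert_eq!(forever.as_str(), borrowed.as_str());
}
```
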
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
index cab19aa..4c4691c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
@@ -1,337 +1,100 @@
 //! Documentation attribute related utilities.
-use either::Either;
-use hir::{
-    AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
-    db::{DefDatabase, HirDatabase},
-    resolve_doc_path_on, sym,
-};
-use itertools::Itertools;
-use span::{TextRange, TextSize};
-use syntax::{
-    AstToken,
-    ast::{self, IsString},
-};
+use std::borrow::Cow;
+
+use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on};
 
 /// Holds documentation
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Documentation(String);
+pub struct Documentation<'db>(Cow<'db, str>);
 
-impl Documentation {
-    pub fn new(s: String) -> Self {
-        Documentation(s)
+impl<'db> Documentation<'db> {
+    #[inline]
+    pub fn new_owned(s: String) -> Self {
+        Documentation(Cow::Owned(s))
     }
 
+    #[inline]
+    pub fn new_borrowed(s: &'db str) -> Self {
+        Documentation(Cow::Borrowed(s))
+    }
+
+    #[inline]
+    pub fn into_owned(self) -> Documentation<'static> {
+        Documentation::new_owned(self.0.into_owned())
+    }
+
+    #[inline]
     pub fn as_str(&self) -> &str {
         &self.0
     }
 }
 
-impl From<Documentation> for String {
-    fn from(Documentation(string): Documentation) -> Self {
-        string
+pub trait HasDocs: HasAttrs + Copy {
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
+        let docs = match self.docs_with_rangemap(db)? {
+            Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()),
+            Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()),
+        };
+        Some(docs)
     }
-}
-
-pub trait HasDocs: HasAttrs {
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
-    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>;
-    fn resolve_doc_path(
-        self,
-        db: &dyn HirDatabase,
-        link: &str,
-        ns: Option<hir::Namespace>,
-        is_inner_doc: bool,
-    ) -> Option<hir::DocLinkDef>;
-}
-/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
-#[derive(Debug)]
-pub struct DocsRangeMap {
-    source_map: AttrSourceMap,
-    // (docstring-line-range, attr_index, attr-string-range)
-    // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
-    // the original (untrimmed) syntax doc line
-    mapping: Vec<(TextRange, AttrId, TextRange)>,
-}
-
-impl DocsRangeMap {
-    /// Maps a [`TextRange`] relative to the documentation string back to its AST range
-    pub fn map(&self, range: TextRange) -> Option<(InFile<TextRange>, AttrId)> {
-        let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
-        let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
-        if !line_docs_range.contains_range(range) {
-            return None;
-        }
-
-        let relative_range = range - line_docs_range.start();
-
-        let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
-        match source {
-            Either::Left(attr) => {
-                let string = get_doc_string_in_attr(attr)?;
-                let text_range = string.open_quote_text_range()?;
-                let range = TextRange::at(
-                    text_range.end() + original_line_src_range.start() + relative_range.start(),
-                    string.syntax().text_range().len().min(range.len()),
-                );
-                Some((InFile { file_id, value: range }, idx))
-            }
-            Either::Right(comment) => {
-                let text_range = comment.syntax().text_range();
-                let range = TextRange::at(
-                    text_range.start()
-                        + TextSize::try_from(comment.prefix().len()).ok()?
-                        + original_line_src_range.start()
-                        + relative_range.start(),
-                    text_range.len().min(range.len()),
-                );
-                Some((InFile { file_id, value: range }, idx))
-            }
-        }
-    }
-
-    pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap {
-        let mapping = self
-            .mapping
-            .into_iter()
-            .map(|(buf_offset, id, base_offset)| {
-                let buf_offset = buf_offset.checked_add(offset).unwrap();
-                (buf_offset, id, base_offset)
-            })
-            .collect_vec();
-        DocsRangeMap { source_map: self.source_map, mapping }
-    }
-}
-
-pub fn docs_with_rangemap(
-    db: &dyn DefDatabase,
-    attrs: &AttrsWithOwner,
-) -> Option<(Documentation, DocsRangeMap)> {
-    let docs = attrs
-        .by_key(sym::doc)
-        .attrs()
-        .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
-    let indent = doc_indent(attrs);
-    let mut buf = String::new();
-    let mut mapping = Vec::new();
-    for (doc, idx) in docs {
-        if !doc.is_empty() {
-            let mut base_offset = 0;
-            for raw_line in doc.split('\n') {
-                let line = raw_line.trim_end();
-                let line_len = line.len();
-                let (offset, line) = match line.char_indices().nth(indent) {
-                    Some((offset, _)) => (offset, &line[offset..]),
-                    None => (0, line),
-                };
-                let buf_offset = buf.len();
-                buf.push_str(line);
-                mapping.push((
-                    TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
-                    idx,
-                    TextRange::at(
-                        (base_offset + offset).try_into().ok()?,
-                        line_len.try_into().ok()?,
-                    ),
-                ));
-                buf.push('\n');
-                base_offset += raw_line.len() + 1;
-            }
-        } else {
-            buf.push('\n');
-        }
-    }
-    buf.pop();
-    if buf.is_empty() {
-        None
-    } else {
-        Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
-    }
-}
-
-pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
-    let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
-    let indent = doc_indent(attrs);
-    let mut buf = String::new();
-    for doc in docs {
-        // str::lines doesn't yield anything for the empty string
-        if !doc.is_empty() {
-            // We don't trim trailing whitespace from doc comments as multiple trailing spaces
-            // indicates a hard line break in Markdown.
-            let lines = doc.lines().map(|line| {
-                line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
-            });
-
-            buf.extend(Itertools::intersperse(lines, "\n"));
-        }
-        buf.push('\n');
-    }
-    buf.pop();
-    if buf.is_empty() { None } else { Some(buf) }
-}
-
-macro_rules! impl_has_docs {
-    ($($def:ident,)*) => {$(
-        impl HasDocs for hir::$def {
-            fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
-                docs_from_attrs(&self.attrs(db)).map(Documentation)
-            }
-            fn docs_with_rangemap(
-                self,
-                db: &dyn HirDatabase,
-            ) -> Option<(Documentation, DocsRangeMap)> {
-                docs_with_rangemap(db, &self.attrs(db))
-            }
-            fn resolve_doc_path(
-                self,
-                db: &dyn HirDatabase,
-                link: &str,
-                ns: Option<hir::Namespace>,
-                is_inner_doc: bool,
-            ) -> Option<hir::DocLinkDef> {
-                resolve_doc_path_on(db, self, link, ns, is_inner_doc)
-            }
-        }
-    )*};
-}
-
-impl_has_docs![
-    Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
-];
-
-macro_rules! impl_has_docs_enum {
-    ($($variant:ident),* for $enum:ident) => {$(
-        impl HasDocs for hir::$variant {
-            fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
-                hir::$enum::$variant(self).docs(db)
-            }
-
-            fn docs_with_rangemap(
-                self,
-                db: &dyn HirDatabase,
-            ) -> Option<(Documentation, DocsRangeMap)> {
-                hir::$enum::$variant(self).docs_with_rangemap(db)
-            }
-            fn resolve_doc_path(
-                self,
-                db: &dyn HirDatabase,
-                link: &str,
-                ns: Option<hir::Namespace>,
-                is_inner_doc: bool,
-            ) -> Option<hir::DocLinkDef> {
-                hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc)
-            }
-        }
-    )*};
-}
-
-impl_has_docs_enum![Struct, Union, Enum for Adt];
-
-impl HasDocs for hir::AssocItem {
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
-        match self {
-            hir::AssocItem::Function(it) => it.docs(db),
-            hir::AssocItem::Const(it) => it.docs(db),
-            hir::AssocItem::TypeAlias(it) => it.docs(db),
-        }
-    }
-
-    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
-        match self {
-            hir::AssocItem::Function(it) => it.docs_with_rangemap(db),
-            hir::AssocItem::Const(it) => it.docs_with_rangemap(db),
-            hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db),
-        }
-    }
-
-    fn resolve_doc_path(
-        self,
-        db: &dyn HirDatabase,
-        link: &str,
-        ns: Option<hir::Namespace>,
-        is_inner_doc: bool,
-    ) -> Option<hir::DocLinkDef> {
-        match self {
-            hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
-            hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
-            hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
-        }
-    }
-}
-
-impl HasDocs for hir::ExternCrateDecl {
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
-        let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db));
-        let decl_docs = docs_from_attrs(&self.attrs(db));
-        match (decl_docs, crate_docs) {
-            (None, None) => None,
-            (Some(decl_docs), None) => Some(decl_docs),
-            (None, Some(crate_docs)) => Some(crate_docs),
-            (Some(mut decl_docs), Some(crate_docs)) => {
-                decl_docs.push('\n');
-                decl_docs.push('\n');
-                decl_docs += &crate_docs;
-                Some(decl_docs)
-            }
-        }
-        .map(Documentation::new)
-    }
-
-    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
-        let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db));
-        let decl_docs = docs_with_rangemap(db, &self.attrs(db));
-        match (decl_docs, crate_docs) {
-            (None, None) => None,
-            (Some(decl_docs), None) => Some(decl_docs),
-            (None, Some(crate_docs)) => Some(crate_docs),
-            (
-                Some((Documentation(mut decl_docs), mut decl_range_map)),
-                Some((Documentation(crate_docs), crate_range_map)),
-            ) => {
-                decl_docs.push('\n');
-                decl_docs.push('\n');
-                let offset = TextSize::new(decl_docs.len() as u32);
-                decl_docs += &crate_docs;
-                let crate_range_map = crate_range_map.shift_docstring_line_range(offset);
-                decl_range_map.mapping.extend(crate_range_map.mapping);
-                Some((Documentation(decl_docs), decl_range_map))
-            }
-        }
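+    // Borrow the docs (and their range map) already computed by HIR instead of rebuilding them here.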
+    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
+        self.hir_docs(db).map(Cow::Borrowed)
     }
     fn resolve_doc_path(
         self,
         db: &dyn HirDatabase,
         link: &str,
         ns: Option<hir::Namespace>,
-        is_inner_doc: bool,
+        is_inner_doc: hir::IsInnerDoc,
     ) -> Option<hir::DocLinkDef> {
         resolve_doc_path_on(db, self, link, ns, is_inner_doc)
     }
 }
 
-fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
-    match it.expr() {
-        // #[doc = lit]
-        Some(ast::Expr::Literal(lit)) => match lit.kind() {
-            ast::LiteralKind::String(it) => Some(it),
-            _ => None,
-        },
-        // #[cfg_attr(..., doc = "", ...)]
-        None => {
-            // FIXME: See highlight injection for what to do here
-            None
-        }
-        _ => None,
-    }
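+// These impls are intentionally empty: every method is supplied by the `HasDocs` defaults.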
+macro_rules! impl_has_docs {
+    ($($def:ident,)*) => {$(
+        impl HasDocs for hir::$def {}
+    )*};
 }
 
-fn doc_indent(attrs: &hir::Attrs) -> usize {
-    let mut min = !0;
-    for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
-        if let Some(m) =
-            val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min()
-        {
-            min = min.min(m);
+impl_has_docs![
+    Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
+    AssocItem, Struct, Union, Enum,
+];
+
+impl HasDocs for hir::ExternCrateDecl {
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
+        let crate_docs = self.resolved_crate(db)?.hir_docs(db);
+        let decl_docs = self.hir_docs(db);
+        match (decl_docs, crate_docs) {
+            (None, None) => None,
+            (Some(docs), None) | (None, Some(docs)) => {
+                Some(Documentation::new_borrowed(docs.docs()))
+            }
+            (Some(decl_docs), Some(crate_docs)) => {
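+                // Both present: concatenate the declaration docs and the crate-root docs,
+                // separated by a blank line.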
+                let mut docs = String::with_capacity(
+                    decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(),
+                );
+                docs.push_str(decl_docs.docs());
+                docs.push_str("\n\n");
+                docs.push_str(crate_docs.docs());
+                Some(Documentation::new_owned(docs))
+            }
         }
     }
-    min
+
+    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
+        let crate_docs = self.resolved_crate(db)?.hir_docs(db);
+        let decl_docs = self.hir_docs(db);
+        match (decl_docs, crate_docs) {
+            (None, None) => None,
+            (Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)),
+            (Some(decl_docs), Some(crate_docs)) => {
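+                // Same merge for the rangemap-carrying `hir::Docs`: append `crate_docs`
+                // onto a clone of `decl_docs`, separated by a blank line.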
+                let mut docs = decl_docs.clone();
+                docs.append_str("\n\n");
+                docs.append(crate_docs);
+                Some(Cow::Owned(docs))
+            }
+        }
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
index a9d596d..c9a670b 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
@@ -25,18 +25,14 @@ fn from_ra_fixture(
         // We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`.
         std::panic::catch_unwind(|| {
             let mut db = RootDatabase::default();
-            let fixture = test_fixture::ChangeFixture::parse_with_proc_macros(
-                &db,
-                text,
-                minicore.0,
-                Vec::new(),
-            );
+            let fixture =
+                test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new());
             db.apply_change(fixture.change);
             let files = fixture
                 .files
                 .into_iter()
                 .zip(fixture.file_lines)
-                .map(|(file_id, range)| (file_id.file_id(&db), range))
+                .map(|(file_id, range)| (file_id.file_id(), range))
                 .collect();
             (db, files, fixture.sysroot_files)
         })
@@ -526,7 +522,7 @@ fn upmap_from_ra_fixture(
     String,
     Symbol,
     SmolStr,
-    Documentation,
+    Documentation<'_>,
     SymbolKind,
     CfgExpr,
     ReferenceCategory,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
index eacd9b9..36a6938 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
@@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool {
 
 const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
 
-pub fn format_docs(src: &Documentation) -> String {
+pub fn format_docs(src: &Documentation<'_>) -> String {
     format_docs_(src.as_str())
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 018c841..f52b345 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -12,7 +12,7 @@
 use hir::{
     Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
     HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
-    ModuleSource, PathResolution, Semantics, Visibility, sym,
+    ModuleSource, PathResolution, Semantics, Visibility,
 };
 use memchr::memmem::Finder;
 use parser::SyntaxKind;
@@ -169,7 +169,7 @@ fn crate_graph(db: &RootDatabase) -> SearchScope {
             entries.extend(
                 source_root
                     .iter()
-                    .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
+                    .map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)),
             );
         }
         SearchScope { entries }
@@ -183,11 +183,9 @@ fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
 
             let source_root = db.file_source_root(root_file).source_root_id(db);
             let source_root = db.source_root(source_root).source_root(db);
-            entries.extend(
-                source_root
-                    .iter()
-                    .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
-            );
+            entries.extend(source_root.iter().map(|id| {
+                (EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None)
+            }));
         }
         SearchScope { entries }
     }
@@ -201,7 +199,7 @@ fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
         SearchScope {
             entries: source_root
                 .iter()
-                .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
+                .map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None))
                 .collect(),
         }
     }
@@ -368,7 +366,7 @@ fn search_scope(&self, db: &RootDatabase) -> SearchScope {
         if let Definition::Macro(macro_def) = self {
             return match macro_def.kind(db) {
                 hir::MacroKind::Declarative => {
-                    if macro_def.attrs(db).by_key(sym::macro_export).exists() {
+                    if macro_def.attrs(db).is_macro_export() {
                         SearchScope::reverse_dependencies(db, module.krate())
                     } else {
                         SearchScope::krate(db, module.krate())
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index 30d1df4..427a510 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -3,7 +3,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(3000),
+                    Id(2c00),
                 ),
                 block: None,
                 local_id: Idx::<ModuleData>(0),
@@ -16,7 +16,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3401,
+                                3801,
                             ),
                         },
                     ),
@@ -24,7 +24,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -50,7 +50,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3400,
+                                3800,
                             ),
                         },
                     ),
@@ -58,7 +58,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -84,7 +84,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3400,
+                                3800,
                             ),
                         },
                     ),
@@ -92,7 +92,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -118,7 +118,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3400,
+                                3800,
                             ),
                         },
                     ),
@@ -126,7 +126,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -152,7 +152,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3400,
+                                3800,
                             ),
                         },
                     ),
@@ -160,7 +160,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -186,7 +186,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3401,
+                                3801,
                             ),
                         },
                     ),
@@ -194,7 +194,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -220,7 +220,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3400,
+                                3800,
                             ),
                         },
                     ),
@@ -228,7 +228,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 5ef0ecb..cc13019 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -3,7 +3,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(3000),
+                    Id(2c00),
                 ),
                 block: None,
                 local_id: Idx::<ModuleData>(0),
@@ -22,7 +22,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -49,14 +49,14 @@
                 def: TypeAlias(
                     TypeAlias {
                         id: TypeAliasId(
-                            6800,
+                            6c00,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -88,7 +88,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -115,14 +115,14 @@
                 def: Const(
                     Const {
                         id: ConstId(
-                            6000,
+                            6400,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -147,14 +147,14 @@
                 def: Const(
                     Const {
                         id: ConstId(
-                            6002,
+                            6402,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -180,7 +180,7 @@
                     Enum(
                         Enum {
                             id: EnumId(
-                                4c00,
+                                5000,
                             ),
                         },
                     ),
@@ -188,7 +188,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -214,7 +214,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4800,
+                                4c00,
                             ),
                         ),
                     },
@@ -222,7 +222,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -248,7 +248,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4800,
+                                4c00,
                             ),
                         ),
                     },
@@ -256,7 +256,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -281,14 +281,14 @@
                 def: Static(
                     Static {
                         id: StaticId(
-                            6400,
+                            6800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -314,7 +314,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4401,
+                                4801,
                             ),
                         },
                     ),
@@ -322,7 +322,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -348,7 +348,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4400,
+                                4800,
                             ),
                         },
                     ),
@@ -356,7 +356,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: MacroFile(
                         MacroCallId(
-                            Id(3c00),
+                            Id(4000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -382,7 +382,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4405,
+                                4805,
                             ),
                         },
                     ),
@@ -390,7 +390,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -418,7 +418,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4406,
+                                4806,
                             ),
                         },
                     ),
@@ -426,7 +426,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -454,7 +454,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4407,
+                                4807,
                             ),
                         },
                     ),
@@ -462,7 +462,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -488,7 +488,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4402,
+                                4802,
                             ),
                         },
                     ),
@@ -496,7 +496,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -521,14 +521,14 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            5800,
+                            5c00,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -554,7 +554,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4800,
+                                4c00,
                             ),
                         ),
                     },
@@ -562,7 +562,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -588,7 +588,7 @@
                     Union(
                         Union {
                             id: UnionId(
-                                5000,
+                                5400,
                             ),
                         },
                     ),
@@ -596,7 +596,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -622,7 +622,7 @@
                     Module {
                         id: ModuleId {
                             krate: Crate(
-                                Id(3000),
+                                Id(2c00),
                             ),
                             block: None,
                             local_id: Idx::<ModuleData>(1),
@@ -632,7 +632,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -658,7 +658,7 @@
                     Module {
                         id: ModuleId {
                             krate: Crate(
-                                Id(3000),
+                                Id(2c00),
                             ),
                             block: None,
                             local_id: Idx::<ModuleData>(2),
@@ -668,7 +668,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -694,7 +694,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                3801,
+                                3c01,
                             ),
                         ),
                     },
@@ -702,7 +702,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -727,14 +727,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            5c02,
+                            6002,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -761,14 +761,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            5c01,
+                            6001,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -796,7 +796,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                3800,
+                                3c00,
                             ),
                         ),
                     },
@@ -804,7 +804,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -829,14 +829,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            5c00,
+                            6000,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -862,7 +862,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                3801,
+                                3c01,
                             ),
                         ),
                     },
@@ -870,7 +870,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -895,14 +895,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            5c03,
+                            6003,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -930,7 +930,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(3000),
+                    Id(2c00),
                 ),
                 block: None,
                 local_id: Idx::<ModuleData>(1),
@@ -943,7 +943,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4403,
+                                4803,
                             ),
                         },
                     ),
@@ -951,7 +951,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2000),
+                            Id(3000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -977,7 +977,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(3000),
+                    Id(2c00),
                 ),
                 block: None,
                 local_id: Idx::<ModuleData>(2),
@@ -989,14 +989,14 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            5800,
+                            5c00,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2001),
+                            Id(3001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1022,7 +1022,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4800,
+                                4c00,
                             ),
                         ),
                     },
@@ -1030,7 +1030,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2001),
+                            Id(3001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1056,7 +1056,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4404,
+                                4804,
                             ),
                         },
                     ),
@@ -1064,7 +1064,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2001),
+                            Id(3001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1090,7 +1090,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4800,
+                                4c00,
                             ),
                         ),
                     },
@@ -1098,7 +1098,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2001),
+                            Id(3001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1124,7 +1124,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4404,
+                                4804,
                             ),
                         },
                     ),
@@ -1132,7 +1132,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(2001),
+                            Id(3001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
index 22872b5..3ab837a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
@@ -13,7 +13,7 @@
         loc: DeclarationLocation {
             hir_file_id: FileId(
                 EditionedFileId(
-                    Id(2001),
+                    Id(3001),
                 ),
             ),
             ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt
index 9f98bf8..a6a808d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt
@@ -13,7 +13,7 @@
         loc: DeclarationLocation {
             hir_file_id: FileId(
                 EditionedFileId(
-                    Id(2001),
+                    Id(3001),
                 ),
             ),
             ptr: SyntaxNodePtr {
@@ -47,7 +47,7 @@
         loc: DeclarationLocation {
             hir_file_id: FileId(
                 EditionedFileId(
-                    Id(2000),
+                    Id(3000),
                 ),
             ),
             ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
index 61e2838..7b9fdb1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
@@ -114,8 +114,7 @@ fn assoc_item_of_trait(
 #[cfg(test)]
 mod tests {
     use expect_test::{Expect, expect};
-    use hir::FilePosition;
-    use hir::Semantics;
+    use hir::{EditionedFileId, FilePosition, Semantics};
     use span::Edition;
     use syntax::ast::{self, AstNode};
     use test_fixture::ChangeFixture;
@@ -127,10 +126,11 @@ pub(crate) fn position(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
         let mut database = RootDatabase::default();
-        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+        let change_fixture = ChangeFixture::parse(ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
+        let file_id = EditionedFileId::from_span_guess_origin(&database, file_id);
         let offset = range_or_offset.expect_offset();
         (database, FilePosition { file_id, offset })
     }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 8611ef6..dfa9639 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -95,7 +95,7 @@ fn inactive_item() {
   //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
 
     #[cfg(no)] #[cfg(no2)] mod m;
-  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
+  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
 
     #[cfg(all(not(a), b))] enum E {}
   //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
@@ -130,7 +130,6 @@ trait Bar {
     /// Tests that `cfg` attributes behind `cfg_attr` are handled properly.
     #[test]
     fn inactive_via_cfg_attr() {
-        cov_mark::check!(cfg_attr_active);
         check(
             r#"
     #[cfg_attr(not(never), cfg(no))] fn f() {}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index 8b708f2..9aa7aed 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::InvalidDeriveTarget,
 ) -> Diagnostic {
-    let display_range = ctx.sema.diagnostics_display_range(d.node);
+    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
 
     Diagnostic::new(
         DiagnosticCode::RustcHardError("E0774"),
@@ -29,7 +29,7 @@ fn fails_on_function() {
 //- minicore:derive
 mod __ {
     #[derive()]
-  //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
+   // ^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
     fn main() {}
 }
             "#,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 6a1ecae..a44b043 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -13,7 +13,7 @@
 // This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`.
 pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
     // Use more accurate position if available.
-    let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
+    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
     Diagnostic::new(
         DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }),
         d.message.clone(),
@@ -27,8 +27,10 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) ->
 // This diagnostic is shown for macro expansion errors.
 pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic {
     // Use more accurate position if available.
-    let display_range =
-        ctx.resolve_precise_location(&d.node.map(|it| it.syntax_node_ptr()), d.name);
+    let display_range = match d.name {
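+        // Prefer the macro definition's name range when present; otherwise fall back to the whole node.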
+        Some(name) => ctx.sema.diagnostics_display_range_for_range(d.node.with_value(name)),
+        None => ctx.sema.diagnostics_display_range(d.node.map(|it| it.syntax_node_ptr())),
+    };
     Diagnostic::new(
         DiagnosticCode::Ra("macro-def-error", Severity::Error),
         d.message.clone(),
@@ -135,10 +137,12 @@ macro_rules! env { () => {} }
 #[rustc_builtin_macro]
 macro_rules! concat { () => {} }
 
-  include!(concat!(env!("OUT_DIR"), "/out.rs"));
-                      //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
-                 //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
-         //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+  include!(concat!(
+        // ^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+    env!(
+  //^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+        "OUT_DIR"), "/out.rs"));
+      //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
 "#,
         );
     }
@@ -182,7 +186,7 @@ fn main() {
            //^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
 
     include!(concat!("does ", "not ", "exist"));
-                  //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
+                  // ^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
 
     env!(invalid);
        //^^^^^^^ error: expected string literal
@@ -289,7 +293,7 @@ fn include_does_not_break_diagnostics() {
 //- /include-me.rs
 /// long doc that pushes the diagnostic range beyond the first file's text length
   #[err]
-//^^^^^^error: unresolved macro `err`
+ // ^^^ error: unresolved macro `err`
 mod prim_never {}
 "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index 701b30b..7d0c71f 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -7,7 +7,7 @@ pub(crate) fn malformed_derive(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::MalformedDerive,
 ) -> Diagnostic {
-    let display_range = ctx.sema.diagnostics_display_range(d.node);
+    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
 
     Diagnostic::new(
         DiagnosticCode::RustcHardError("E0777"),
@@ -28,7 +28,7 @@ fn invalid_input() {
 //- minicore:derive
 mod __ {
     #[derive = "aaaa"]
-  //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
+   // ^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
     struct Foo;
 }
             "#,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index df1cd76..59215f3 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1094,4 +1094,19 @@ fn main() {
             "#,
         );
     }
+
+    #[test]
+    fn multiple_target_feature_enable() {
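+        // Both forms below should parse to the same feature set (avx2 + fma),
+        // so calling `foo` from `bar` must not trigger a missing-unsafe diagnostic.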
+        check_diagnostics(
+            r#"
+#[target_feature(enable = "avx2,fma")]
+fn foo() {}
+
+#[target_feature(enable = "avx2", enable = "fma")]
+fn bar() {
+    foo();
+}
+        "#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index a87b8c4..030c82c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -8,8 +8,7 @@ pub(crate) fn unresolved_macro_call(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::UnresolvedMacroCall,
 ) -> Diagnostic {
-    // Use more accurate position if available.
-    let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location);
+    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
     let bang = if d.is_bang { "!" } else { "" };
     Diagnostic::new(
         DiagnosticCode::RustcHardError("unresolved-macro-call"),
@@ -76,7 +75,7 @@ fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
             r#"
     mod _test_inner {
         #![empty_attr]
-      //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+        // ^^^^^^^^^^ error: unresolved macro `empty_attr`
     }
 "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index 1530e64..41ae854 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -90,7 +90,7 @@ mod handlers {
     Crate, DisplayTarget, InFile, Semantics, db::ExpandDatabase, diagnostics::AnyDiagnostic,
 };
 use ide_db::{
-    EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
+    FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
     assists::{Assist, AssistId, AssistResolveStrategy, ExprFillDefaultMode},
     base_db::{ReleaseChannel, RootQueryDb as _},
     generated::lints::{CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS, DEFAULT_LINTS, Lint, LintGroup},
@@ -102,7 +102,7 @@ mod handlers {
 use itertools::Itertools;
 use syntax::{
     AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange,
-    ast::{self, AstNode, HasAttrs},
+    ast::{self, AstNode},
 };
 
 // FIXME: Make this an enum
@@ -277,31 +277,6 @@ struct DiagnosticsContext<'a> {
     is_nightly: bool,
 }
 
-impl DiagnosticsContext<'_> {
-    fn resolve_precise_location(
-        &self,
-        node: &InFile<SyntaxNodePtr>,
-        precise_location: Option<TextRange>,
-    ) -> FileRange {
-        let sema = &self.sema;
-        (|| {
-            let precise_location = precise_location?;
-            let root = sema.parse_or_expand(node.file_id);
-            match root.covering_element(precise_location) {
-                syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)),
-                syntax::NodeOrToken::Token(it) => {
-                    node.with_value(it).original_file_range_opt(sema.db)
-                }
-            }
-        })()
-        .map(|frange| ide_db::FileRange {
-            file_id: frange.file_id.file_id(self.sema.db),
-            range: frange.range,
-        })
-        .unwrap_or_else(|| sema.diagnostics_display_range(*node))
-    }
-}
-
 /// Request parser level diagnostics for the given [`FileId`].
 pub fn syntax_diagnostics(
     db: &RootDatabase,
@@ -315,9 +290,7 @@ pub fn syntax_diagnostics(
     }
 
     let sema = Semantics::new(db);
-    let editioned_file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let editioned_file_id = sema.attach_first_edition(file_id);
 
     let (file_id, _) = editioned_file_id.unpack(db);
 
@@ -346,9 +319,7 @@ pub fn semantic_diagnostics(
 ) -> Vec<Diagnostic> {
     let _p = tracing::info_span!("semantic_diagnostics").entered();
     let sema = Semantics::new(db);
-    let editioned_file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let editioned_file_id = sema.attach_first_edition(file_id);
 
     let (file_id, edition) = editioned_file_id.unpack(db);
     let mut res = Vec::new();
@@ -426,7 +397,7 @@ pub fn semantic_diagnostics(
                         Diagnostic::new(
                             DiagnosticCode::SyntaxError,
                             format!("Syntax Error in Expansion: {err}"),
-                            ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
+                            ctx.sema.diagnostics_display_range_for_range(d.range),
                         )
                 }));
                 continue;
@@ -677,7 +648,7 @@ fn find_outline_mod_lint_severity(
     let lint_groups = lint_groups(&diag.code, edition);
     lint_attrs(
         sema,
-        ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
+        &ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
         edition,
     )
     .for_each(|(lint, severity)| {
@@ -698,7 +669,7 @@ fn lint_severity_at(
         .ancestors()
         .filter_map(ast::AnyHasAttrs::cast)
         .find_map(|ancestor| {
-            lint_attrs(sema, ancestor, edition)
+            lint_attrs(sema, &ancestor, edition)
                 .find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity))
         })
         .or_else(|| {
@@ -706,13 +677,13 @@ fn lint_severity_at(
         })
 }
 
+// FIXME: Switch this to analysis' `expand_cfg_attr`.
 fn lint_attrs<'a>(
     sema: &'a Semantics<'a, RootDatabase>,
-    ancestor: ast::AnyHasAttrs,
+    ancestor: &'a ast::AnyHasAttrs,
     edition: Edition,
 ) -> impl Iterator<Item = (SmolStr, Severity)> + 'a {
-    ancestor
-        .attrs_including_inner()
+    ast::attrs_including_inner(ancestor)
         .filter_map(|attr| {
             attr.as_simple_call().and_then(|(name, value)| match &*name {
                 "allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))),
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
index 181cc74..de26879 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
@@ -17,7 +17,7 @@ pub fn ssr_from_comment(
     frange: FileRange,
 ) -> Option<(MatchFinder<'_>, TextRange)> {
     let comment = {
-        let file_id = EditionedFileId::current_edition(db, frange.file_id);
+        let file_id = EditionedFileId::current_edition_guess_origin(db, frange.file_id);
 
         let file = db.parse(file_id);
         file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index 66ece4e..977dfb7 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -125,9 +125,7 @@ pub fn in_context(
     ) -> Result<MatchFinder<'db>, SsrError> {
         restrict_ranges.retain(|range| !range.range.is_empty());
         let sema = Semantics::new(db);
-        let file_id = sema
-            .attach_first_edition(lookup_context.file_id)
-            .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id));
+        let file_id = sema.attach_first_edition(lookup_context.file_id);
         let resolution_scope = resolving::ResolutionScope::new(
             &sema,
             hir::FilePosition { file_id, offset: lookup_context.offset },
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index 56484ae..5f54c66 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -6,7 +6,7 @@
 };
 use hir::FileRange;
 use ide_db::{
-    EditionedFileId, FileId, FxHashSet,
+    FileId, FxHashSet,
     defs::Definition,
     search::{SearchScope, UsageSearchResult},
     symbol_index::LocalRoots,
@@ -136,11 +136,7 @@ fn search_scope(&self) -> SearchScope {
         // seems to get put into a single source root.
         let mut files = Vec::new();
         self.search_files_do(|file_id| {
-            files.push(
-                self.sema
-                    .attach_first_edition(file_id)
-                    .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)),
-            );
+            files.push(self.sema.attach_first_edition(file_id));
         });
         SearchScope::files(&files)
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index c197d55..0ed91cf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -13,13 +13,13 @@
 use url::Url;
 
 use hir::{
-    Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, sym,
+    Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase,
 };
 use ide_db::{
     RootDatabase,
     base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
     defs::{Definition, NameClass, NameRefClass},
-    documentation::{DocsRangeMap, Documentation, HasDocs, docs_with_rangemap},
+    documentation::{Documentation, HasDocs},
     helpers::pick_best_token,
 };
 use syntax::{
@@ -54,7 +54,7 @@ pub(crate) fn rewrite_links(
     db: &RootDatabase,
     markdown: &str,
     definition: Definition,
-    range_map: Option<DocsRangeMap>,
+    range_map: Option<&hir::Docs>,
 ) -> String {
     let mut cb = broken_link_clone_cb;
     let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
@@ -74,9 +74,9 @@ pub(crate) fn rewrite_links(
                 TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
             let is_inner_doc = range_map
                 .as_ref()
-                .and_then(|range_map| range_map.map(text_range))
-                .map(|(_, attr_id)| attr_id.is_inner_attr())
-                .unwrap_or(false);
+                .and_then(|range_map| range_map.find_ast_range(text_range))
+                .map(|(_, is_inner)| is_inner)
+                .unwrap_or(hir::IsInnerDoc::No);
             if let Some((target, title)) =
                 rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
             {
@@ -187,7 +187,7 @@ pub(crate) fn external_docs(
 /// Extracts all links from a given markdown text, returning the definition text range, link-text
 /// and the namespace if known.
 pub(crate) fn extract_definitions_from_docs(
-    docs: &Documentation,
+    docs: &Documentation<'_>,
 ) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
     Parser::new_with_broken_link_callback(
         docs.as_str(),
@@ -214,7 +214,7 @@ pub(crate) fn resolve_doc_path_for_def(
     def: Definition,
     link: &str,
     ns: Option<hir::Namespace>,
-    is_inner_doc: bool,
+    is_inner_doc: hir::IsInnerDoc,
 ) -> Option<Definition> {
     match def {
         Definition::Module(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
@@ -324,11 +324,11 @@ pub(crate) fn get_definition_with_descend_at<T>(
             let token_start = t.text_range().start();
             let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
             let (attributes, def) = Self::doc_attributes(sema, &node, is_inner)?;
-            let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?;
+            let doc_mapping = attributes.hir_docs(sema.db)?;
             let (in_expansion_range, link, ns, is_inner) =
-                extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
-                    let (mapped, idx) = doc_mapping.map(range)?;
-                    (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, idx.is_inner_attr()))
+                extract_definitions_from_docs(&Documentation::new_borrowed(doc_mapping.docs())).into_iter().find_map(|(range, link, ns)| {
+                    let (mapped, is_inner) = doc_mapping.find_ast_range(range)?;
+                    (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, is_inner))
                 })?;
             // get the relative range to the doc/attribute in the expansion
             let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
@@ -416,7 +416,7 @@ fn rewrite_intra_doc_link(
     def: Definition,
     target: &str,
     title: &str,
-    is_inner_doc: bool,
+    is_inner_doc: hir::IsInnerDoc,
     link_type: LinkType,
 ) -> Option<(String, String)> {
     let (link, ns) = parse_intra_doc_link(target);
@@ -659,14 +659,12 @@ fn filename_and_frag_for_def(
         Definition::Crate(_) => String::from("index.html"),
         Definition::Module(m) => match m.name(db) {
             // `#[doc(keyword = "...")]` is internal, used only by the rust compiler
-            Some(name) => {
-                match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) {
-                    Some(kw) => {
-                        format!("keyword.{kw}.html")
-                    }
-                    None => format!("{}/index.html", name.as_str()),
+            Some(name) => match m.doc_keyword(db) {
+                Some(kw) => {
+                    format!("keyword.{kw}.html")
                 }
-            }
+                None => format!("{}/index.html", name.as_str()),
+            },
             None => String::from("index.html"),
         },
         Definition::Trait(t) => {
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
index 3fd8855..34ffc11 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -1,11 +1,11 @@
-use std::iter;
+use std::{borrow::Cow, iter};
 
 use expect_test::{Expect, expect};
 use hir::Semantics;
 use ide_db::{
     FilePosition, FileRange, RootDatabase,
     defs::Definition,
-    documentation::{DocsRangeMap, Documentation, HasDocs},
+    documentation::{Documentation, HasDocs},
 };
 use itertools::Itertools;
 use syntax::{AstNode, SyntaxNode, ast, match_ast};
@@ -45,9 +45,9 @@ fn check_external_docs(
 fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let (analysis, position) = fixture::position(ra_fixture);
     let sema = &Semantics::new(&analysis.db);
-    let (cursor_def, docs, range) = def_under_cursor(sema, &position);
+    let (cursor_def, docs) = def_under_cursor(sema, &position);
     let res =
-        hir::attach_db(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)));
+        hir::attach_db(sema.db, || rewrite_links(sema.db, docs.docs(), cursor_def, Some(&docs)));
     expect.assert_eq(&res)
 }
 
@@ -57,33 +57,36 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
     let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
     expected.sort_by_key(key_fn);
     let sema = &Semantics::new(&analysis.db);
-    let (cursor_def, docs, range) = def_under_cursor(sema, &position);
-    let defs = extract_definitions_from_docs(&docs);
-    let actual: Vec<_> = defs
-        .into_iter()
-        .flat_map(|(text_range, link, ns)| {
-            let attr = range.map(text_range);
-            let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false);
-            let def = hir::attach_db(sema.db, || {
-                resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
-                    .unwrap_or_else(|| panic!("Failed to resolve {link}"))
-            });
-            def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link))
-        })
-        .map(|(nav_target, link)| {
-            let range =
-                FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
-            (range, link)
-        })
-        .sorted_by_key(key_fn)
-        .collect();
-    assert_eq!(expected, actual);
+    hir::attach_db(sema.db, || {
+        let (cursor_def, docs) = def_under_cursor(sema, &position);
+        let defs = extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()));
+        let actual: Vec<_> = defs
+            .into_iter()
+            .flat_map(|(text_range, link, ns)| {
+                let attr = docs.find_ast_range(text_range);
+                let is_inner_attr =
+                    attr.map(|(_file, is_inner)| is_inner).unwrap_or(hir::IsInnerDoc::No);
+                let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
+                    .unwrap_or_else(|| panic!("Failed to resolve {link}"));
+                def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link))
+            })
+            .map(|(nav_target, link)| {
+                let range = FileRange {
+                    file_id: nav_target.file_id,
+                    range: nav_target.focus_or_full_range(),
+                };
+                (range, link)
+            })
+            .sorted_by_key(key_fn)
+            .collect();
+        assert_eq!(expected, actual);
+    });
 }
 
-fn def_under_cursor(
-    sema: &Semantics<'_, RootDatabase>,
+fn def_under_cursor<'db>(
+    sema: &Semantics<'db, RootDatabase>,
     position: &FilePosition,
-) -> (Definition, Documentation, DocsRangeMap) {
+) -> (Definition, Cow<'db, hir::Docs>) {
     let (docs, def) = sema
         .parse_guess_edition(position.file_id)
         .syntax()
@@ -94,14 +97,14 @@ fn def_under_cursor(
         .find_map(|it| node_to_def(sema, &it))
         .expect("no def found")
         .unwrap();
-    let (docs, range) = docs.expect("no docs found for cursor def");
-    (def, docs, range)
+    let docs = docs.expect("no docs found for cursor def");
+    (def, docs)
 }
 
-fn node_to_def(
-    sema: &Semantics<'_, RootDatabase>,
+fn node_to_def<'db>(
+    sema: &Semantics<'db, RootDatabase>,
     node: &SyntaxNode,
-) -> Option<Option<(Option<(Documentation, DocsRangeMap)>, Definition)>> {
+) -> Option<Option<(Option<Cow<'db, hir::Docs>>, Definition)>> {
     Some(match_ast! {
         match node {
             ast::SourceFile(it)  => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))),
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index 094a4a7..9d38a86 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -26,7 +26,7 @@ pub struct ExpandedMacro {
 // ![Expand Macro Recursively](https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif)
 pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
     let sema = Semantics::new(db);
-    let file_id = sema.attach_first_edition(position.file_id)?;
+    let file_id = sema.attach_first_edition(position.file_id);
     let file = sema.parse(file_id);
     let krate = sema.file_to_module_def(file_id.file_id(db))?.krate().into();
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
index fbf8904..1a8591d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
@@ -7,10 +7,10 @@
 /// Creates analysis for a single file.
 pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
-    (host.analysis(), change_fixture.files[0].file_id(&host.db))
+    (host.analysis(), change_fixture.files[0].file_id())
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -18,23 +18,23 @@ pub(crate) fn position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FilePosition) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
-    (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset })
+    (host.analysis(), FilePosition { file_id: file_id.file_id(), offset })
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of $0.
 pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let range = range_or_offset.expect_range();
-    (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range })
+    (host.analysis(), FileRange { file_id: file_id.file_id(), range })
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@@ -42,11 +42,11 @@ pub(crate) fn range_or_position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FileId, RangeOrOffset) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    (host.analysis(), file_id.file_id(&host.db), range_or_offset)
+    (host.analysis(), file_id.file_id(), range_or_offset)
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -54,25 +54,24 @@ pub(crate) fn annotations(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
 
-    let db = &host.db;
     let annotations = change_fixture
         .files
         .iter()
         .flat_map(|&file_id| {
-            let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap();
+            let file_text = host.analysis().file_text(file_id.file_id()).unwrap();
             let annotations = extract_annotations(&file_text);
             annotations
                 .into_iter()
-                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
+                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data))
         })
         .collect();
-    (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations)
+    (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }, annotations)
 }
 
 /// Creates analysis from a multi-file fixture with annotations without $0
@@ -80,20 +79,19 @@ pub(crate) fn annotations_without_marker(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
+    let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
 
-    let db = &host.db;
     let annotations = change_fixture
         .files
         .iter()
         .flat_map(|&file_id| {
-            let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap();
+            let file_text = host.analysis().file_text(file_id.file_id()).unwrap();
             let annotations = extract_annotations(&file_text);
             annotations
                 .into_iter()
-                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
+                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data))
         })
         .collect();
     (host.analysis(), annotations)
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index d663b70..e1db370 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -18,7 +18,7 @@
     helpers::pick_best_token,
 };
 use itertools::Itertools;
-use span::{Edition, FileId};
+use span::FileId;
 use syntax::{
     AstNode, AstToken,
     SyntaxKind::*,
@@ -50,8 +50,7 @@ pub(crate) fn goto_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = &Semantics::new(db);
     let file = sema.parse_guess_edition(file_id).syntax().clone();
-    let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
+    let edition = sema.attach_first_edition(file_id).edition(db);
     let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT
         | INT_NUMBER
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index 0572bca..a29da4f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -384,7 +384,7 @@ trait Bar {}
 
 fn test() {
     #[derive(Copy)]
-  //^^^^^^^^^^^^^^^
+   // ^^^^^^^^^^^^
     struct Foo$0;
 
     impl Foo {}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 04ce5a7..acba573 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -60,9 +60,7 @@ pub(crate) fn highlight_related(
     ide_db::FilePosition { offset, file_id }: ide_db::FilePosition,
 ) -> Option<Vec<HighlightedRange>> {
     let _p = tracing::info_span!("highlight_related").entered();
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
     let syntax = sema.parse(file_id).syntax().clone();
 
     let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index fa4b4b6..958de89 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -6,9 +6,7 @@
 use std::{iter, ops::Not};
 
 use either::Either;
-use hir::{
-    DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem, Semantics,
-};
+use hir::{DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, Semantics};
 use ide_db::{
     FileRange, FxIndexSet, MiniCore, Ranker, RootDatabase,
     defs::{Definition, IdentClass, NameRefClass, OperatorClass},
@@ -137,8 +135,7 @@ pub(crate) fn hover(
 ) -> Option<RangeInfo<HoverResult>> {
     let sema = &hir::Semantics::new(db);
     let file = sema.parse_guess_edition(file_id).syntax().clone();
-    let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
+    let edition = sema.attach_first_edition(file_id).edition(db);
     let display_target = sema.first_crate(file_id)?.to_display_target(db);
     let mut res = if range.is_empty() {
         hover_offset(
@@ -675,10 +672,10 @@ fn walk_and_push_ty(
         } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
             push_new_def(trait_.into());
         } else if let Some(tp) = t.as_type_param(db) {
-            let sized_trait = LangItem::Sized.resolve_trait(db, t.krate(db).into());
+            let sized_trait = hir::Trait::lang(db, t.krate(db), hir::LangItem::Sized);
             tp.trait_bounds(db)
                 .into_iter()
-                .filter(|&it| Some(it.into()) != sized_trait)
+                .filter(|&it| Some(it) != sized_trait)
                 .for_each(|it| push_new_def(it.into()));
         }
     });
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index a1eff3a..a892985 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -1,5 +1,5 @@
 //! Logic for rendering the different hover messages
-use std::{env, mem, ops::Not};
+use std::{borrow::Cow, env, mem, ops::Not};
 
 use either::Either;
 use hir::{
@@ -11,7 +11,7 @@
 use ide_db::{
     RootDatabase,
     defs::{Definition, find_std_module},
-    documentation::{DocsRangeMap, HasDocs},
+    documentation::{Documentation, HasDocs},
     famous_defs::FamousDefs,
     generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
     syntax_helpers::prettify_macro_expansion,
@@ -278,9 +278,9 @@ pub(super) fn keyword(
         keyword_hints(sema, token, parent, edition, display_target);
 
     let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?;
-    let (docs, range_map) = doc_owner.docs_with_rangemap(sema.db)?;
+    let docs = doc_owner.docs_with_rangemap(sema.db)?;
     let (markup, range_map) =
-        markup(Some(docs.into()), Some(range_map), description, None, None, String::new());
+        markup(Some(Either::Left(docs)), description, None, None, String::new());
     let markup = process_markup(sema.db, Definition::Module(doc_owner), &markup, range_map, config);
     Some(HoverResult { markup, actions })
 }
@@ -370,12 +370,12 @@ pub(super) fn process_markup(
     db: &RootDatabase,
     def: Definition,
     markup: &Markup,
-    markup_range_map: Option<DocsRangeMap>,
+    markup_range_map: Option<hir::Docs>,
     config: &HoverConfig<'_>,
 ) -> Markup {
     let markup = markup.as_str();
     let markup = if config.links_in_hover {
-        rewrite_links(db, markup, def, markup_range_map)
+        rewrite_links(db, markup, def, markup_range_map.as_ref())
     } else {
         remove_links(markup)
     };
@@ -484,7 +484,7 @@ pub(super) fn definition(
     config: &HoverConfig<'_>,
     edition: Edition,
     display_target: DisplayTarget,
-) -> (Markup, Option<DocsRangeMap>) {
+) -> (Markup, Option<hir::Docs>) {
     let mod_path = definition_path(db, &def, edition);
     let label = match def {
         Definition::Trait(trait_) => trait_
@@ -520,12 +520,7 @@ pub(super) fn definition(
         }
         _ => def.label(db, display_target),
     };
-    let (docs, range_map) =
-        if let Some((docs, doc_range)) = def.docs_with_rangemap(db, famous_defs, display_target) {
-            (Some(docs), doc_range)
-        } else {
-            (None, None)
-        };
+    let docs = def.docs_with_rangemap(db, famous_defs, display_target);
     let value = || match def {
         Definition::Variant(it) => {
             if !it.parent_enum(db).is_data_carrying(db) {
@@ -842,14 +837,7 @@ pub(super) fn definition(
         }
     };
 
-    markup(
-        docs.map(Into::into),
-        range_map,
-        desc,
-        extra.is_empty().not().then_some(extra),
-        mod_path,
-        subst_types,
-    )
+    markup(docs, desc, extra.is_empty().not().then_some(extra), mod_path, subst_types)
 }
 
 #[derive(Debug)]
@@ -1083,8 +1071,8 @@ fn closure_ty(
     };
     let mut markup = format!("```rust\n{}\n```", c.display_with_impl(sema.db, display_target));
 
-    if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db).into()) {
-        push_new_def(hir::Trait::from(trait_).into())
+    if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db)) {
+        push_new_def(trait_.into())
     }
     if let Some(layout) = render_memory_layout(
         config.memory_layout,
@@ -1124,13 +1112,12 @@ fn definition_path(db: &RootDatabase, &def: &Definition, edition: Edition) -> Op
 }
 
 fn markup(
-    docs: Option<String>,
-    range_map: Option<DocsRangeMap>,
+    docs: Option<Either<Cow<'_, hir::Docs>, Documentation<'_>>>,
     rust: String,
     extra: Option<String>,
     mod_path: Option<String>,
     subst_types: String,
-) -> (Markup, Option<DocsRangeMap>) {
+) -> (Markup, Option<hir::Docs>) {
     let mut buf = String::new();
 
     if let Some(mod_path) = mod_path
@@ -1151,10 +1138,21 @@ fn markup(
     if let Some(doc) = docs {
         format_to!(buf, "\n___\n\n");
         let offset = TextSize::new(buf.len() as u32);
-        let buf_range_map = range_map.map(|range_map| range_map.shift_docstring_line_range(offset));
-        format_to!(buf, "{}", doc);
+        let docs_str = match &doc {
+            Either::Left(docs) => docs.docs(),
+            Either::Right(docs) => docs.as_str(),
+        };
+        format_to!(buf, "{}", docs_str);
+        let range_map = match doc {
+            Either::Left(range_map) => {
+                let mut range_map = range_map.into_owned();
+                range_map.shift_by(offset);
+                Some(range_map)
+            }
+            Either::Right(_) => None,
+        };
 
-        (buf.into(), buf_range_map)
+        (buf.into(), range_map)
     } else {
         (buf.into(), None)
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index 6dd9e84..deacc7f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -89,9 +89,7 @@ pub(crate) fn inlay_hints(
 ) -> Vec<InlayHint> {
     let _p = tracing::info_span!("inlay_hints").entered();
     let sema = Semantics::new(db);
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
     let file = sema.parse(file_id);
     let file = file.syntax();
 
@@ -142,9 +140,7 @@ pub(crate) fn inlay_hints_resolve(
 ) -> Option<InlayHint> {
     let _p = tracing::info_span!("inlay_hints_resolve").entered();
     let sema = Semantics::new(db);
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
     let file = sema.parse(file_id);
     let file = file.syntax();
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
index ab3ce5b..49d7d454 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
@@ -16,6 +16,8 @@
     inlay_hints::LazyProperty,
 };
 
+const ELLIPSIS: &str = "…";
+
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
     sema: &Semantics<'_, RootDatabase>,
@@ -60,6 +62,12 @@ pub(super) fn hints(
 
         let module = ast::Module::cast(list.syntax().parent()?)?;
         (format!("mod {}", module.name()?), module.name().map(name))
+    } else if let Some(match_arm_list) = ast::MatchArmList::cast(node.clone()) {
+        closing_token = match_arm_list.r_curly_token()?;
+
+        let match_expr = ast::MatchExpr::cast(match_arm_list.syntax().parent()?)?;
+        let label = format_match_label(&match_expr, config)?;
+        (label, None)
     } else if let Some(label) = ast::Label::cast(node.clone()) {
         // in this case, `ast::Label` could be seen as a part of `ast::BlockExpr`
         // the actual number of lines in this case should be the line count of the parent BlockExpr,
@@ -91,7 +99,7 @@ pub(super) fn hints(
         match_ast! {
             match parent {
                 ast::Fn(it) => {
-                    (format!("fn {}", it.name()?), it.name().map(name))
+                    (format!("{}fn {}", fn_qualifiers(&it), it.name()?), it.name().map(name))
                 },
                 ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
                 ast::Const(it) => {
@@ -101,6 +109,33 @@ pub(super) fn hints(
                         (format!("const {}", it.name()?), it.name().map(name))
                     }
                 },
+                ast::LoopExpr(loop_expr) => {
+                    if loop_expr.label().is_some() {
+                        return None;
+                    }
+                    ("loop".into(), None)
+                },
+                ast::WhileExpr(while_expr) => {
+                    if while_expr.label().is_some() {
+                        return None;
+                    }
+                    (keyword_with_condition("while", while_expr.condition(), config), None)
+                },
+                ast::ForExpr(for_expr) => {
+                    if for_expr.label().is_some() {
+                        return None;
+                    }
+                    let label = format_for_label(&for_expr, config)?;
+                    (label, None)
+                },
+                ast::IfExpr(if_expr) => {
+                    let label = label_for_if_block(&if_expr, &block, config)?;
+                    (label, None)
+                },
+                ast::LetElse(let_else) => {
+                    let label = format_let_else_label(&let_else, config)?;
+                    (label, None)
+                },
                 _ => return None,
             }
         }
@@ -154,11 +189,117 @@ pub(super) fn hints(
     None
 }
 
+fn fn_qualifiers(func: &ast::Fn) -> String {
+    let mut qualifiers = String::new();
+    if func.const_token().is_some() {
+        qualifiers.push_str("const ");
+    }
+    if func.async_token().is_some() {
+        qualifiers.push_str("async ");
+    }
+    if func.unsafe_token().is_some() {
+        qualifiers.push_str("unsafe ");
+    }
+    qualifiers
+}
+
+fn keyword_with_condition(
+    keyword: &str,
+    condition: Option<ast::Expr>,
+    config: &InlayHintsConfig<'_>,
+) -> String {
+    if let Some(expr) = condition {
+        return format!("{keyword} {}", snippet_from_node(expr.syntax(), config));
+    }
+    keyword.to_owned()
+}
+
+fn format_for_label(for_expr: &ast::ForExpr, config: &InlayHintsConfig<'_>) -> Option<String> {
+    let pat = for_expr.pat()?;
+    let iterable = for_expr.iterable()?;
+    Some(format!(
+        "for {} in {}",
+        snippet_from_node(pat.syntax(), config),
+        snippet_from_node(iterable.syntax(), config)
+    ))
+}
+
+fn format_match_label(
+    match_expr: &ast::MatchExpr,
+    config: &InlayHintsConfig<'_>,
+) -> Option<String> {
+    let expr = match_expr.expr()?;
+    Some(format!("match {}", snippet_from_node(expr.syntax(), config)))
+}
+
+fn label_for_if_block(
+    if_expr: &ast::IfExpr,
+    block: &ast::BlockExpr,
+    config: &InlayHintsConfig<'_>,
+) -> Option<String> {
+    if if_expr.then_branch().is_some_and(|then_branch| then_branch.syntax() == block.syntax()) {
+        Some(keyword_with_condition("if", if_expr.condition(), config))
+    } else if matches!(
+        if_expr.else_branch(),
+        Some(ast::ElseBranch::Block(else_block)) if else_block.syntax() == block.syntax()
+    ) {
+        Some("else".into())
+    } else {
+        None
+    }
+}
+
+fn format_let_else_label(let_else: &ast::LetElse, config: &InlayHintsConfig<'_>) -> Option<String> {
+    let stmt = let_else.syntax().parent().and_then(ast::LetStmt::cast)?;
+    let pat = stmt.pat()?;
+    let initializer = stmt.initializer()?;
+    Some(format!(
+        "let {} = {} else",
+        snippet_from_node(pat.syntax(), config),
+        snippet_from_node(initializer.syntax(), config)
+    ))
+}
+
+fn snippet_from_node(node: &SyntaxNode, config: &InlayHintsConfig<'_>) -> String {
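+    // Multi-line expressions collapse to a bare ellipsis; single-line ones are length-limited below.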
+    let mut text = node.text().to_string();
+    if text.contains('\n') {
+        return ELLIPSIS.into();
+    }
+
+    let Some(limit) = config.max_length else {
+        return text;
+    };
+    if limit == 0 {
+        return ELLIPSIS.into();
+    }
+
+    if text.len() <= limit {
+        return text;
+    }
+
+    let boundary = text.floor_char_boundary(limit.min(text.len()));
+    if boundary == text.len() {
+        return text;
+    }
+
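+    // Back up to the last space before the char boundary so the snippet is not cut mid-word,
+    // then append the ellipsis.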
+    let cut = text[..boundary]
+        .char_indices()
+        .rev()
+        .find(|&(_, ch)| ch == ' ')
+        .map(|(idx, _)| idx)
+        .unwrap_or(0);
+    text.truncate(cut);
+    text.push_str(ELLIPSIS);
+    text
+}
+
 #[cfg(test)]
 mod tests {
+    use expect_test::expect;
+
     use crate::{
         InlayHintsConfig,
-        inlay_hints::tests::{DISABLED_CONFIG, check_with_config},
+        inlay_hints::tests::{DISABLED_CONFIG, check_expect, check_with_config},
     };
 
     #[test]
@@ -179,6 +320,10 @@ fn h<T>(with: T, arguments: u8, ...) {
   }
 //^ fn h
 
+async fn async_fn() {
+  }
+//^ async fn async_fn
+
 trait Tr {
     fn f();
     fn g() {
@@ -260,4 +405,124 @@ fn test() {
 "#,
         );
     }
+
+    #[test]
+    fn hints_closing_brace_additional_blocks() {
+        check_expect(
+            InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG },
+            r#"
+fn demo() {
+    loop {
+
+    }
+
+    while let Some(value) = next() {
+
+    }
+
+    for value in iter {
+
+    }
+
+    if cond {
+
+    }
+
+    if let Some(x) = maybe {
+
+    }
+
+    if other {
+    } else {
+
+    }
+
+    let Some(v) = maybe else {
+
+    };
+
+    match maybe {
+        Some(v) => {
+
+        }
+        value if check(value) => {
+
+        }
+        None => {}
+    }
+}
+"#,
+            expect![[r#"
+                [
+                    (
+                        364..365,
+                        [
+                            InlayHintLabelPart {
+                                text: "fn demo",
+                                linked_location: Some(
+                                    Computed(
+                                        FileRangeWrapper {
+                                            file_id: FileId(
+                                                0,
+                                            ),
+                                            range: 3..7,
+                                        },
+                                    ),
+                                ),
+                                tooltip: "",
+                            },
+                        ],
+                    ),
+                    (
+                        28..29,
+                        [
+                            "loop",
+                        ],
+                    ),
+                    (
+                        73..74,
+                        [
+                            "while let Some(value) = next()",
+                        ],
+                    ),
+                    (
+                        105..106,
+                        [
+                            "for value in iter",
+                        ],
+                    ),
+                    (
+                        127..128,
+                        [
+                            "if cond",
+                        ],
+                    ),
+                    (
+                        164..165,
+                        [
+                            "if let Some(x) = maybe",
+                        ],
+                    ),
+                    (
+                        200..201,
+                        [
+                            "else",
+                        ],
+                    ),
+                    (
+                        240..241,
+                        [
+                            "let Some(v) = maybe else",
+                        ],
+                    ),
+                    (
+                        362..363,
+                        [
+                            "match maybe",
+                        ],
+                    ),
+                ]
+            "#]],
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index 113cb83..9436264 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -332,7 +332,8 @@ pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
     pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
         // FIXME edition
         self.with_db(|db| {
-            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+            let editioned_file_id_wrapper =
+                EditionedFileId::current_edition_guess_origin(&self.db, file_id);
 
             db.parse(editioned_file_id_wrapper).tree()
         })
@@ -361,7 +362,7 @@ pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
     /// supported).
     pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
         self.with_db(|db| {
-            let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
+            let file_id = EditionedFileId::current_edition_guess_origin(&self.db, position.file_id);
             let parse = db.parse(file_id);
             let file = parse.tree();
             matching_brace::matching_brace(&file, position.offset)
@@ -422,7 +423,7 @@ pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<Expande
     pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
         self.with_db(|db| {
             let editioned_file_id_wrapper =
-                EditionedFileId::current_edition(&self.db, frange.file_id);
+                EditionedFileId::current_edition_guess_origin(&self.db, frange.file_id);
             let parse = db.parse(editioned_file_id_wrapper);
             join_lines::join_lines(config, &parse.tree(), frange.range)
         })
@@ -463,7 +464,8 @@ pub fn file_structure(
     ) -> Cancellable<Vec<StructureNode>> {
         // FIXME: Edition
         self.with_db(|db| {
-            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+            let editioned_file_id_wrapper =
+                EditionedFileId::current_edition_guess_origin(&self.db, file_id);
             let source_file = db.parse(editioned_file_id_wrapper).tree();
             file_structure::file_structure(&source_file, config)
         })
@@ -494,7 +496,8 @@ pub fn inlay_hints_resolve(
     /// Returns the set of folding ranges.
     pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
         self.with_db(|db| {
-            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
+            let editioned_file_id_wrapper =
+                EditionedFileId::current_edition_guess_origin(&self.db, file_id);
 
             folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
         })
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 7d5d905..4c4d57f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -51,7 +51,8 @@ pub struct NavigationTarget {
     pub kind: Option<SymbolKind>,
     pub container_name: Option<Symbol>,
     pub description: Option<String>,
-    pub docs: Option<Documentation>,
+    // FIXME: Use the database lifetime here.
+    pub docs: Option<Documentation<'static>>,
     /// In addition to a `name` field, a `NavigationTarget` may also be aliased
     /// In such cases we want a `NavigationTarget` to be accessible by its alias
     pub alias: Option<Symbol>,
@@ -157,7 +158,7 @@ pub(crate) fn from_module_to_decl(
                             full_range,
                             SymbolKind::Module,
                         );
-                        res.docs = module.docs(db);
+                        res.docs = module.docs(db).map(Documentation::into_owned);
                         res.description = Some(
                             module.display(db, module.krate().to_display_target(db)).to_string(),
                         );
@@ -429,7 +430,7 @@ fn try_to_nav(
                 D::KIND,
             )
             .map(|mut res| {
-                res.docs = self.docs(db);
+                res.docs = self.docs(db).map(Documentation::into_owned);
                 res.description = hir::attach_db(db, || {
                     Some(self.display(db, self.krate(db).to_display_target(db)).to_string())
                 });
@@ -522,7 +523,7 @@ fn try_to_nav(
                     SymbolKind::Module,
                 );
 
-                res.docs = self.docs(db);
+                res.docs = self.docs(db).map(Documentation::into_owned);
                 res.description = Some(self.display(db, krate.to_display_target(db)).to_string());
                 res.container_name = container_name(db, *self);
                 res
@@ -544,10 +545,9 @@ fn try_to_nav(
             FieldSource::Named(it) => {
                 NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
                     |mut res| {
-                        res.docs = self.docs(db);
-                        res.description = hir::attach_db(db, || {
-                            Some(self.display(db, krate.to_display_target(db)).to_string())
-                        });
+                        res.docs = self.docs(db).map(Documentation::into_owned);
+                        res.description =
+                            Some(self.display(db, krate.to_display_target(db)).to_string());
                         res
                     },
                 )
@@ -586,7 +586,7 @@ fn try_to_nav(
                 self.kind(db).into(),
             )
             .map(|mut res| {
-                res.docs = self.docs(db);
+                res.docs = self.docs(db).map(Documentation::into_owned);
                 res
             }),
         )
@@ -916,7 +916,7 @@ pub(crate) fn orig_range_with_focus_r(
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
     let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
 
-    let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind;
+    let call = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap());
 
     let def_range =
         || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db);
@@ -942,7 +942,8 @@ pub(crate) fn orig_range_with_focus_r(
                             // name lies outside the node, so instead point to the macro call which
                             // *should* contain the name
                             _ => {
-                                let kind = call_kind();
+                                let call = call();
+                                let kind = call.kind;
                                 let range = kind.clone().original_call_range_with_input(db);
                                 // If the focus range is in the attribute/derive body, we
                                 // need to point the call site to the entire body, if not, fall back
@@ -954,7 +955,7 @@ pub(crate) fn orig_range_with_focus_r(
                                 {
                                     range
                                 } else {
-                                    kind.original_call_range(db)
+                                    kind.original_call_range(db, call.krate)
                                 }
                             }
                         },
@@ -983,11 +984,14 @@ pub(crate) fn orig_range_with_focus_r(
                         },
                     ),
                     // node is in macro def, just show the focus
-                    _ => (
-                        // show the macro call
-                        (call_kind().original_call_range(db), None),
-                        Some((focus_range, Some(focus_range))),
-                    ),
+                    _ => {
+                        let call = call();
+                        (
+                            // show the macro call
+                            (call.kind.original_call_range(db, call.krate), None),
+                            Some((focus_range, Some(focus_range))),
+                        )
+                    }
                 }
             }
             // lost name? can't happen for single tokens
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 516cc7f..0738b7f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -28,7 +28,6 @@
 use itertools::Itertools;
 use macros::UpmapFromRaFixture;
 use nohash_hasher::IntMap;
-use span::Edition;
 use syntax::AstToken;
 use syntax::{
     AstNode,
@@ -419,10 +418,7 @@ fn handle_control_flow_keywords(
     FilePosition { file_id, offset }: FilePosition,
 ) -> Option<ReferenceSearchResult> {
     let file = sema.parse_guess_edition(file_id);
-    let edition = sema
-        .attach_first_edition(file_id)
-        .map(|it| it.edition(sema.db))
-        .unwrap_or(Edition::CURRENT);
+    let edition = sema.attach_first_edition(file_id).edition(sema.db);
     let token = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
         _ if kind.is_keyword(edition) => 4,
         T![=>] => 3,
@@ -1124,7 +1120,10 @@ pub fn quux$0() {}
         check_with_scope(
             code,
             Some(&mut |db| {
-                SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
+                SearchScope::single_file(EditionedFileId::current_edition_guess_origin(
+                    db,
+                    FileId::from_raw(2),
+                ))
             }),
             expect![[r#"
                 quux Function FileId(0) 19..35 26..30
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index ce59639..7955e9b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -121,7 +121,7 @@ pub(crate) fn rename(
 ) -> RenameResult<SourceChange> {
     let sema = Semantics::new(db);
     let file_id = sema
-        .attach_first_edition(position.file_id)
+        .attach_first_edition_opt(position.file_id)
         .ok_or_else(|| format_err!("No references found at position"))?;
     let source_file = sema.parse(file_id);
     let syntax = source_file.syntax();
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 2086a19..9e17ab2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -3,17 +3,13 @@
 use arrayvec::ArrayVec;
 use ast::HasName;
 use cfg::{CfgAtom, CfgExpr};
-use hir::{
-    AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase,
-    sym,
-};
+use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, sym};
 use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
 use ide_db::impl_empty_upmap_from_ra_fixture;
 use ide_db::{
     FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
     base_db::RootQueryDb,
     defs::Definition,
-    documentation::docs_from_attrs,
     helpers::visit_file_defs,
     search::{FileReferenceNode, SearchScope},
 };
@@ -323,7 +319,7 @@ pub(crate) fn runnable_fn(
     def: hir::Function,
 ) -> Option<Runnable> {
     let edition = def.krate(sema.db).edition(sema.db);
-    let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db));
+    let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db).cfgs(sema.db));
     let kind = if !under_cfg_test && def.is_main(sema.db) {
         RunnableKind::Bin
     } else {
@@ -358,7 +354,7 @@ pub(crate) fn runnable_fn(
     let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
     let update_test = UpdateTest::find_snapshot_macro(sema, file_range);
 
-    let cfg = def.attrs(sema.db).cfg();
+    let cfg = def.attrs(sema.db).cfgs(sema.db).cloned();
     Some(Runnable { use_name_in_title: false, nav, kind, cfg, update_test })
 }
 
@@ -366,8 +362,8 @@ pub(crate) fn runnable_mod(
     sema: &Semantics<'_, RootDatabase>,
     def: hir::Module,
 ) -> Option<Runnable> {
-    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
-    {
+    let cfg = def.attrs(sema.db).cfgs(sema.db);
+    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) {
         return None;
     }
     let path = def
@@ -381,8 +377,7 @@ pub(crate) fn runnable_mod(
         })
         .join("::");
 
-    let attrs = def.attrs(sema.db);
-    let cfg = attrs.cfg();
+    let cfg = cfg.cloned();
     let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site();
 
     let module_source = sema.module_definition_node(def);
@@ -409,10 +404,10 @@ pub(crate) fn runnable_impl(
     let display_target = def.module(sema.db).krate().to_display_target(sema.db);
     let edition = display_target.edition;
     let attrs = def.attrs(sema.db);
-    if !has_runnable_doc_test(&attrs) {
+    if !has_runnable_doc_test(sema.db, &attrs) {
         return None;
     }
-    let cfg = attrs.cfg();
+    let cfg = attrs.cfgs(sema.db).cloned();
     let nav = def.try_to_nav(sema)?.call_site();
     let ty = def.self_ty(sema.db);
     let adt_name = ty.as_adt()?.name(sema.db);
@@ -442,8 +437,16 @@ pub(crate) fn runnable_impl(
     })
 }
 
-fn has_cfg_test(attrs: AttrsWithOwner) -> bool {
-    attrs.cfgs().any(|cfg| matches!(&cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test))
+fn has_cfg_test(cfg: Option<&CfgExpr>) -> bool {
+    return cfg.is_some_and(has_cfg_test_impl);
+
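+    // Recurse into `any(...)`/`all(...)` so nested `test` predicates (e.g. `cfg(all(test, feature = "x"))`)
+    // are detected too.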
+    fn has_cfg_test_impl(cfg: &CfgExpr) -> bool {
+        match cfg {
+            CfgExpr::Atom(CfgAtom::Flag(s)) => *s == sym::test,
+            CfgExpr::Any(cfgs) | CfgExpr::All(cfgs) => cfgs.iter().any(has_cfg_test_impl),
+            _ => false,
+        }
+    }
 }
 
 /// Creates a test mod runnable for outline modules at the top of their definition.
@@ -453,8 +456,8 @@ fn runnable_mod_outline_definition(
 ) -> Option<Runnable> {
     def.as_source_file_id(sema.db)?;
 
-    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
-    {
+    let cfg = def.attrs(sema.db).cfgs(sema.db);
+    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) {
         return None;
     }
     let path = def
@@ -468,8 +471,7 @@ fn runnable_mod_outline_definition(
         })
         .join("::");
 
-    let attrs = def.attrs(sema.db);
-    let cfg = attrs.cfg();
+    let cfg = cfg.cloned();
 
     let mod_source = sema.module_definition_node(def);
     let mod_syntax = mod_source.file_syntax(sema.db);
@@ -508,7 +510,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
     let display_target = krate
         .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into())
         .to_display_target(db);
-    if !has_runnable_doc_test(&attrs) {
+    if !has_runnable_doc_test(db, &attrs) {
         return None;
     }
     let def_name = def.name(db)?;
@@ -554,7 +556,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
         use_name_in_title: false,
         nav,
         kind: RunnableKind::DocTest { test_id },
-        cfg: attrs.cfg(),
+        cfg: attrs.cfgs(db).cloned(),
         update_test: UpdateTest::default(),
     };
     Some(res)
@@ -571,15 +573,15 @@ fn from_fn(db: &dyn HirDatabase, fn_def: hir::Function) -> TestAttr {
     }
 }
 
-fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
+fn has_runnable_doc_test(db: &RootDatabase, attrs: &hir::AttrsWithOwner) -> bool {
     const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
     const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
         &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
 
-    docs_from_attrs(attrs).is_some_and(|doc| {
+    attrs.hir_docs(db).is_some_and(|doc| {
         let mut in_code_block = false;
 
-        for line in doc.lines() {
+        for line in doc.docs().lines() {
             if let Some(header) =
                 RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
             {
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index f9ec448..78dc3f7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -31,7 +31,7 @@
 /// edited.
 #[derive(Debug)]
 pub struct SignatureHelp {
-    pub doc: Option<Documentation>,
+    pub doc: Option<Documentation<'static>>,
     pub signature: String,
     pub active_parameter: Option<usize>,
     parameters: Vec<TextRange>,
@@ -84,8 +84,7 @@ pub(crate) fn signature_help(
         // this prevents us from leaving the CallExpression
         .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
     let token = sema.descend_into_macros_single_exact(token);
-    let edition =
-        sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
+    let edition = sema.attach_first_edition(file_id).edition(db);
     let display_target = sema.first_crate(file_id)?.to_display_target(db);
 
     for node in token.parent_ancestors() {
@@ -174,7 +173,7 @@ fn signature_help_for_call(
     let mut fn_params = None;
     match callable.kind() {
         hir::CallableKind::Function(func) => {
-            res.doc = func.docs(db);
+            res.doc = func.docs(db).map(Documentation::into_owned);
             if func.is_async(db) {
                 format_to!(res.signature, "async ");
             }
@@ -199,7 +198,7 @@ fn signature_help_for_call(
             });
         }
         hir::CallableKind::TupleStruct(strukt) => {
-            res.doc = strukt.docs(db);
+            res.doc = strukt.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition));
 
             let generic_params = GenericDef::Adt(strukt.into())
@@ -212,7 +211,7 @@ fn signature_help_for_call(
             }
         }
         hir::CallableKind::TupleEnumVariant(variant) => {
-            res.doc = variant.docs(db);
+            res.doc = variant.docs(db).map(Documentation::into_owned);
             format_to!(
                 res.signature,
                 "enum {}",
@@ -320,33 +319,33 @@ fn signature_help_for_generics(
     let db = sema.db;
     match generics_def {
         hir::GenericDef::Function(it) => {
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "fn {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::Adt(hir::Adt::Enum(it)) => {
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "enum {}", it.name(db).display(db, edition));
             if let Some(variant) = variant {
                 // In paths, generics of an enum can be specified *after* one of its variants.
                 // eg. `None::<u8>`
                 // We'll use the signature of the enum, but include the docs of the variant.
-                res.doc = variant.docs(db);
+                res.doc = variant.docs(db).map(Documentation::into_owned);
             }
         }
         hir::GenericDef::Adt(hir::Adt::Struct(it)) => {
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "struct {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::Adt(hir::Adt::Union(it)) => {
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "union {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::Trait(it) => {
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "trait {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::TypeAlias(it) => {
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "type {}", it.name(db).display(db, edition));
         }
         // These don't have generic args that can be specified
@@ -501,7 +500,7 @@ fn signature_help_for_tuple_struct_pat(
     let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
         let en = variant.parent_enum(db);
 
-        res.doc = en.docs(db);
+        res.doc = en.docs(db).map(Documentation::into_owned);
         format_to!(
             res.signature,
             "enum {}::{} (",
@@ -518,7 +517,7 @@ fn signature_help_for_tuple_struct_pat(
 
         match adt {
             hir::Adt::Struct(it) => {
-                res.doc = it.docs(db);
+                res.doc = it.docs(db).map(Documentation::into_owned);
                 format_to!(res.signature, "struct {} (", it.name(db).display(db, edition));
                 it.fields(db)
             }
@@ -628,7 +627,7 @@ fn signature_help_for_record_<'db>(
         fields = variant.fields(db);
         let en = variant.parent_enum(db);
 
-        res.doc = en.docs(db);
+        res.doc = en.docs(db).map(Documentation::into_owned);
         format_to!(
             res.signature,
             "enum {}::{} {{ ",
@@ -645,12 +644,12 @@ fn signature_help_for_record_<'db>(
         match adt {
             hir::Adt::Struct(it) => {
                 fields = it.fields(db);
-                res.doc = it.docs(db);
+                res.doc = it.docs(db).map(Documentation::into_owned);
                 format_to!(res.signature, "struct {} {{ ", it.name(db).display(db, edition));
             }
             hir::Adt::Union(it) => {
                 fields = it.fields(db);
-                res.doc = it.docs(db);
+                res.doc = it.docs(db).map(Documentation::into_owned);
                 format_to!(res.signature, "union {} {{ ", it.name(db).display(db, edition));
             }
             _ => return None,
@@ -746,12 +745,12 @@ pub(crate) fn position(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
         let mut database = RootDatabase::default();
-        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+        let change_fixture = ChangeFixture::parse(ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(&database), offset };
+        let position = FilePosition { file_id: file_id.file_id(), offset };
         (database, position)
     }
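The `.map(Documentation::into_owned)` calls repeated throughout the hunks above come from `Documentation` gaining a lifetime tied to the database. A minimal sketch of that shape, assuming a Cow-backed representation (the real rust-analyzer type may differ in detail):

use std::borrow::Cow;

// Sketch only: documentation text that can either borrow from the database
// or own its contents.
pub struct Documentation<'db>(Cow<'db, str>);

impl<'db> Documentation<'db> {
    pub fn new_borrowed(text: &'db str) -> Self {
        Documentation(Cow::Borrowed(text))
    }

    // Detach from the database lifetime by cloning the text, yielding a
    // `Documentation<'static>` that can be stored in long-lived results.
    pub fn into_owned(self) -> Documentation<'static> {
        Documentation(Cow::Owned(self.0.into_owned()))
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
}

Under that assumption, `func.docs(db)` now returns a borrowing `Option<Documentation<'_>>`, and `.map(Documentation::into_owned)` is what lets `res.doc` keep owned, `'static` docs.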
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index 052de0f38..0cf2e15 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -10,7 +10,6 @@
     documentation::Documentation,
     famous_defs::FamousDefs,
 };
-use span::Edition;
 use syntax::{AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T, TextRange};
 
 use crate::navigation_target::UpmappingResult;
@@ -42,9 +41,18 @@ pub struct ReferenceData {
 
 #[derive(Debug)]
 pub struct TokenStaticData {
-    pub documentation: Option<Documentation>,
+    // FIXME: Make this have the lifetime of the database.
+    pub documentation: Option<Documentation<'static>>,
     pub hover: Option<HoverResult>,
+    /// The position of the token itself.
+    ///
+    /// For example, in `fn foo() {}` this is the position of `foo`.
     pub definition: Option<FileRange>,
+    /// The position of the entire definition that this token belongs to.
+    ///
+    /// For example, in `fn foo() {}` this is the position from `fn`
+    /// to the closing brace.
+    pub definition_body: Option<FileRange>,
     pub references: Vec<ReferenceData>,
     pub moniker: Option<MonikerResult>,
     pub display_name: Option<String>,
@@ -109,7 +117,7 @@ fn documentation_for_definition(
     sema: &Semantics<'_, RootDatabase>,
     def: Definition,
     scope_node: &SyntaxNode,
-) -> Option<Documentation> {
+) -> Option<Documentation<'static>> {
     let famous_defs = match &def {
         Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
         _ => None,
@@ -124,6 +132,7 @@ fn documentation_for_definition(
             })
             .to_display_target(sema.db),
     )
+    .map(Documentation::into_owned)
 }
 
 // FIXME: This is a weird function
@@ -194,10 +203,7 @@ fn add_file(&mut self, file_id: FileId) {
         // hovers
         let sema = hir::Semantics::new(self.db);
         let root = sema.parse_guess_edition(file_id).syntax().clone();
-        let edition = sema
-            .attach_first_edition(file_id)
-            .map(|it| it.edition(self.db))
-            .unwrap_or(Edition::CURRENT);
+        let edition = sema.attach_first_edition(file_id).edition(sema.db);
         let display_target = match sema.first_crate(file_id) {
             Some(krate) => krate.to_display_target(sema.db),
             None => return,
@@ -248,6 +254,10 @@ fn add_file(&mut self, file_id: FileId) {
                     definition: def.try_to_nav(&sema).map(UpmappingResult::call_site).map(|it| {
                         FileRange { file_id: it.file_id, range: it.focus_or_full_range() }
                     }),
+                    definition_body: def
+                        .try_to_nav(&sema)
+                        .map(UpmappingResult::call_site)
+                        .map(|it| FileRange { file_id: it.file_id, range: it.full_range }),
                     references: vec![],
                     moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
                     display_name: def
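To make the new `definition` vs. `definition_body` distinction concrete, here is a tiny self-contained illustration of the two ranges for the `fn foo() {}` example from the doc comments above; the byte offsets are written out by hand and merely stand in for rust-analyzer's `FileRange`:

fn main() {
    let src = "fn foo() {}";
    let focus = 3..6;         // `foo`          -> TokenStaticData::definition
    let body = 0..src.len();  // `fn foo() {}`  -> TokenStaticData::definition_body
    assert_eq!(&src[focus], "foo");
    assert_eq!(&src[body], "fn foo() {}");
}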
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 531c7e1..fd5ede8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -197,9 +197,7 @@ pub(crate) fn highlight(
 ) -> Vec<HlRange> {
     let _p = tracing::info_span!("highlight").entered();
     let sema = Semantics::new(db);
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
 
     // Determine the root based on the given range.
     let (root, range_to_highlight) = {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 33df4a8..8937f82 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -3,7 +3,7 @@
 use std::ops::ControlFlow;
 
 use either::Either;
-use hir::{AsAssocItem, HasAttrs, HasVisibility, Semantics, sym};
+use hir::{AsAssocItem, HasAttrs, HasVisibility, Semantics};
 use ide_db::{
     FxHashMap, RootDatabase, SymbolKind,
     defs::{Definition, IdentClass, NameClass, NameRefClass},
@@ -413,7 +413,7 @@ fn highlight_name_ref(
             if is_from_builtin_crate {
                 h |= HlMod::DefaultLibrary;
             }
-            let is_deprecated = resolved_krate.attrs(sema.db).by_key(sym::deprecated).exists();
+            let is_deprecated = resolved_krate.attrs(sema.db).is_deprecated();
             if is_deprecated {
                 h |= HlMod::Deprecated;
             }
@@ -701,7 +701,7 @@ pub(super) fn highlight_def(
     }
 
     if let Some(attrs) = attrs
-        && attrs.by_key(sym::deprecated).exists()
+        && attrs.is_deprecated()
     {
         h |= HlMod::Deprecated;
     }
@@ -751,7 +751,7 @@ fn highlight_method_call(
     let is_from_other_crate = krate.as_ref().map_or(false, |krate| def_crate != *krate);
     let is_from_builtin_crate = def_crate.is_builtin(sema.db);
     let is_public = func.visibility(sema.db) == hir::Visibility::Public;
-    let is_deprecated = func.attrs(sema.db).by_key(sym::deprecated).exists();
+    let is_deprecated = func.attrs(sema.db).is_deprecated();
 
     if is_from_other_crate {
         h |= HlMod::Library;
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index ff617b3..74567e8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -1,6 +1,6 @@
 //! Renders a bit of code as HTML.
 
-use hir::{EditionedFileId, Semantics};
+use hir::Semantics;
 use ide_db::MiniCore;
 use oorandom::Rand32;
 use stdx::format_to;
@@ -18,9 +18,7 @@ pub(crate) fn highlight_as_html_with_config(
     rainbow: bool,
 ) -> String {
     let sema = Semantics::new(db);
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
     let file = sema.parse(file_id);
     let file = file.syntax();
     fn rainbowify(seed: u64) -> String {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 7955f5a..26d2bb5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -1,16 +1,13 @@
 //! "Recursive" Syntax highlighting for code in doctests and fixtures.
 
-use std::mem;
-
-use either::Either;
-use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
-use ide_db::range_mapper::RangeMapper;
+use hir::{EditionedFileId, HirFileId, InFile, Semantics};
 use ide_db::{
-    SymbolKind, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence,
+    SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper,
+    rust_doc::is_rust_fence,
 };
 use syntax::{
-    AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
-    ast::{self, AstNode, IsString, QuoteOffsets},
+    SyntaxNode, TextRange, TextSize,
+    ast::{self, IsString},
 };
 
 use crate::{
@@ -96,118 +93,79 @@ pub(super) fn doc_comment(
         None => return,
     };
     let src_file_id: HirFileId = src_file_id.into();
+    let Some(docs) = attributes.hir_docs(sema.db) else { return };
 
     // Extract intra-doc links and emit highlights for them.
-    if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) {
-        extract_definitions_from_docs(&docs)
-            .into_iter()
-            .filter_map(|(range, link, ns)| {
-                doc_mapping
-                    .map(range)
-                    .filter(|(mapping, _)| mapping.file_id == src_file_id)
-                    .and_then(|(InFile { value: mapped_range, .. }, attr_id)| {
-                        Some(mapped_range).zip(resolve_doc_path_for_def(
-                            sema.db,
-                            def,
-                            &link,
-                            ns,
-                            attr_id.is_inner_attr(),
-                        ))
-                    })
-            })
-            .for_each(|(range, def)| {
-                hl.add(HlRange {
-                    range,
-                    highlight: module_def_to_hl_tag(def)
-                        | HlMod::Documentation
-                        | HlMod::Injected
-                        | HlMod::IntraDocLink,
-                    binding_hash: None,
+    extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()))
+        .into_iter()
+        .filter_map(|(range, link, ns)| {
+            docs.find_ast_range(range)
+                .filter(|(mapping, _)| mapping.file_id == src_file_id)
+                .and_then(|(InFile { value: mapped_range, .. }, is_inner)| {
+                    Some(mapped_range)
+                        .zip(resolve_doc_path_for_def(sema.db, def, &link, ns, is_inner))
                 })
+        })
+        .for_each(|(range, def)| {
+            hl.add(HlRange {
+                range,
+                highlight: module_def_to_hl_tag(def)
+                    | HlMod::Documentation
+                    | HlMod::Injected
+                    | HlMod::IntraDocLink,
+                binding_hash: None,
             })
-    }
+        });
 
     // Extract doc-test sources from the docs and calculate highlighting for them.
 
     let mut inj = RangeMapper::default();
     inj.add_unmapped("fn doctest() {\n");
 
-    let attrs_source_map = attributes.source_map(sema.db);
-
     let mut is_codeblock = false;
     let mut is_doctest = false;
 
-    let mut new_comments = Vec::new();
-    let mut string;
+    let mut has_doctests = false;
 
-    for attr in attributes.by_key(sym::doc).attrs() {
-        let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
+    let mut docs_offset = TextSize::new(0);
+    for mut line in docs.docs().split('\n') {
+        let mut line_docs_offset = docs_offset;
+        docs_offset += TextSize::of(line) + TextSize::of("\n");
+
+        match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
+            Some(idx) => {
+                is_codeblock = !is_codeblock;
+                // Check whether code is rust by inspecting fence guards
+                let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
+                let is_rust = is_rust_fence(guards);
+                is_doctest = is_codeblock && is_rust;
+                continue;
+            }
+            None if !is_doctest => continue,
+            None => (),
+        }
+
+        // Lines marked with `#` should be ignored in the output, so we skip the `#` char
+        if line.starts_with('#') {
+            line_docs_offset += TextSize::of("#");
+            line = &line["#".len()..];
+        }
+
+        let Some((InFile { file_id, value: mapped_range }, _)) =
+            docs.find_ast_range(TextRange::at(line_docs_offset, TextSize::of(line)))
+        else {
+            continue;
+        };
         if file_id != src_file_id {
             continue;
         }
-        let (line, range) = match &src {
-            Either::Left(it) => {
-                string = match find_doc_string_in_attr(attr, it) {
-                    Some(it) => it,
-                    None => continue,
-                };
-                let text = string.text();
-                let text_range = string.syntax().text_range();
-                match string.quote_offsets() {
-                    Some(QuoteOffsets { contents, .. }) => {
-                        (&text[contents - text_range.start()], contents)
-                    }
-                    None => (text, text_range),
-                }
-            }
-            Either::Right(comment) => {
-                let value = comment.prefix().len();
-                let range = comment.syntax().text_range();
-                (
-                    &comment.text()[value..],
-                    TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
-                )
-            }
-        };
 
-        let mut range_start = range.start();
-        for line in line.split('\n') {
-            let line_len = TextSize::from(line.len() as u32);
-            let prev_range_start = {
-                let next_range_start = range_start + line_len + TextSize::from(1);
-                mem::replace(&mut range_start, next_range_start)
-            };
-            let mut pos = TextSize::from(0);
-
-            match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
-                Some(idx) => {
-                    is_codeblock = !is_codeblock;
-                    // Check whether code is rust by inspecting fence guards
-                    let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
-                    let is_rust = is_rust_fence(guards);
-                    is_doctest = is_codeblock && is_rust;
-                    continue;
-                }
-                None if !is_doctest => continue,
-                None => (),
-            }
-
-            // whitespace after comment is ignored
-            if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
-                pos += TextSize::of(ws);
-            }
-            // lines marked with `#` should be ignored in output, we skip the `#` char
-            if line[pos.into()..].starts_with('#') {
-                pos += TextSize::of('#');
-            }
-
-            new_comments.push(TextRange::at(prev_range_start, pos));
-            inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
-            inj.add_unmapped("\n");
-        }
+        has_doctests = true;
+        inj.add(line, mapped_range);
+        inj.add_unmapped("\n");
     }
 
-    if new_comments.is_empty() {
+    if !has_doctests {
         return; // no need to run an analysis on an empty file
     }
 
@@ -240,37 +198,6 @@ pub(super) fn doc_comment(
             }
         }
     }
-
-    for range in new_comments {
-        hl.add(HlRange {
-            range,
-            highlight: HlTag::Comment | HlMod::Documentation,
-            binding_hash: None,
-        });
-    }
-}
-
-fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
-    match it.expr() {
-        // #[doc = lit]
-        Some(ast::Expr::Literal(lit)) => match lit.kind() {
-            ast::LiteralKind::String(it) => Some(it),
-            _ => None,
-        },
-        // #[cfg_attr(..., doc = "", ...)]
-        None => {
-            // We gotta hunt the string token manually here
-            let text = attr.string_value()?.as_str();
-            // FIXME: We just pick the first string literal that has the same text as the doc attribute
-            // This means technically we might highlight the wrong one
-            it.syntax()
-                .descendants_with_tokens()
-                .filter_map(NodeOrToken::into_token)
-                .filter_map(ast::String::cast)
-                .find(|string| string.text().get(1..string.text().len() - 1) == Some(text))
-        }
-        _ => None,
-    }
 }
 
 fn module_def_to_hl_tag(def: Definition) -> HlTag {
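For context on the rewritten loop above: it now walks the flattened docs string line by line, toggling a code-block flag at rustdoc fences, skipping non-doctest lines, stripping the leading `#` of hidden lines, and mapping what remains back to an AST range via `find_ast_range`. A self-contained sketch of just that line classification, with `is_rust_fence` approximated by a simple prefix check (all names below are local to the sketch):

const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
const RUSTDOC_FENCE_LENGTH: usize = 3;

// Collect the lines of a docs string that belong to Rust doctests,
// with the rustdoc `#` hiding marker already stripped.
fn doctest_lines(docs: &str) -> Vec<&str> {
    let mut is_codeblock = false;
    let mut is_doctest = false;
    let mut out = Vec::new();
    for mut line in docs.split('\n') {
        if let Some(idx) = RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
            is_codeblock = !is_codeblock;
            // A fence opens a doctest only when its info string looks like Rust.
            let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
            is_doctest = is_codeblock && (guards.is_empty() || guards.starts_with("rust"));
            continue;
        }
        if !is_doctest {
            continue;
        }
        // `#`-prefixed lines are hidden in rendered docs but still compiled.
        if let Some(rest) = line.strip_prefix('#') {
            line = rest;
        }
        out.push(line);
    }
    out
}

fn main() {
    let docs = "```\nlet x = 1;\n# let hidden = 2;\n```\nplain prose\n";
    assert_eq!(doctest_lines(docs), ["let x = 1;", " let hidden = 2;"]);
}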
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index e1c45e9..b5c3df6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -43,21 +43,21 @@
 </style>
 <pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
 <span class="comment documentation">//! ```</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 <span class="comment documentation">//! ```</span>
 
 <span class="comment documentation">//! Syntactic name ref highlighting testing</span>
 <span class="comment documentation">//! ```rust</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="module crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">T</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="keyword injected">type</span><span class="none injected"> </span><span class="type_alias associated declaration injected static trait">Assoc</span><span class="semicolon injected">;</span><span class="none injected"> </span><span class="brace injected">}</span>
-<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">f</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">Arg</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">-&gt;</span><span class="none injected"> </span><span class="keyword injected">use</span><span class="angle injected">&lt;</span><span class="struct injected">Arg</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="keyword injected">where</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="colon injected">:</span><span class="none injected"> </span><span class="trait injected">T</span><span class="comparison injected">&lt;</span><span class="struct injected">Assoc</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="comparison injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">other</span><span class="none injected"> </span><span class="keyword injected">as</span><span class="none injected"> </span><span class="module crate_root declaration injected">otter</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
+<span class="comment documentation">//! </span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">T</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="keyword injected">type</span><span class="none injected"> </span><span class="type_alias associated declaration injected static trait">Assoc</span><span class="semicolon injected">;</span><span class="none injected"> </span><span class="brace injected">}</span>
+<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">f</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">Arg</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">-&gt;</span><span class="none injected"> </span><span class="keyword injected">use</span><span class="angle injected">&lt;</span><span class="struct injected">Arg</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="keyword injected">where</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="colon injected">:</span><span class="none injected"> </span><span class="trait injected">T</span><span class="comparison injected">&lt;</span><span class="struct injected">Assoc</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="comparison injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 <span class="comment documentation">//! ```</span>
 <span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
 
 <span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
 <span class="comment documentation">/// ```</span>
 <span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
     <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
@@ -66,15 +66,15 @@
 <span class="comment documentation">/// This is an impl of </span><span class="struct documentation injected intra_doc_link">[`Foo`]</span><span class="comment documentation"> with a code block.</span>
 <span class="comment documentation">///</span>
 <span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+<span class="comment documentation">/// </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
 <span class="comment documentation">///</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+<span class="comment documentation">/// </span><span class="brace injected">}</span>
 <span class="comment documentation">/// ```</span>
 <span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
     <span class="comment documentation">/// ```</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
     <span class="comment">//    KILLER WHALE</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected">    Ishmael."</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="string_literal injected">    Ishmael."</span><span class="semicolon injected">;</span>
     <span class="comment documentation">/// ```</span>
     <span class="keyword">pub</span> <span class="keyword const">const</span> <span class="constant associated const declaration public static">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="bool_literal">true</span><span class="semicolon">;</span>
 
@@ -83,8 +83,8 @@
     <span class="comment documentation">/// # Examples</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ```</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
     <span class="comment documentation">/// ```</span>
     <span class="keyword">pub</span> <span class="keyword const">const</span> <span class="keyword">fn</span> <span class="function associated const declaration public static">new</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="struct">Foo</span> <span class="brace">{</span>
         <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">bar</span><span class="colon">:</span> <span class="bool_literal">true</span> <span class="brace">}</span>
@@ -95,38 +95,38 @@
     <span class="comment documentation">/// # Examples</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ```</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
     <span class="comment documentation">///</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
     <span class="comment documentation">///</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// calls bar on foo</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="comment injected">// calls bar on foo</span>
+    <span class="comment documentation">/// </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
     <span class="comment documentation">///</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
     <span class="comment documentation">///</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">/* multi-line</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">       comment */</span>
+    <span class="comment documentation">/// </span><span class="comment injected">/* multi-line</span>
+    <span class="comment documentation">/// </span><span class="comment injected">       comment */</span>
     <span class="comment documentation">///</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected">  bar</span><span class="escape_sequence injected">\n</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected">         "</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
+    <span class="comment documentation">/// </span><span class="string_literal injected">  bar</span><span class="escape_sequence injected">\n</span>
+    <span class="comment documentation">/// </span><span class="string_literal injected">         "</span><span class="semicolon injected">;</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ```</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ```rust,no_run</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
     <span class="comment documentation">/// ```</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ~~~rust,no_run</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// code block with tilde.</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="comment injected">// code block with tilde.</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="method injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
     <span class="comment documentation">/// ~~~</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ```</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// functions</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword const injected">const</span><span class="none injected"> </span><span class="const_param const declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="none injected">    </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param const injected">X</span><span class="semicolon injected">;</span>
-    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
+    <span class="comment documentation">/// </span><span class="comment injected">// functions</span>
+    <span class="comment documentation">/// </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword const injected">const</span><span class="none injected"> </span><span class="const_param const declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+    <span class="comment documentation">/// </span><span class="none injected">    </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param const injected">X</span><span class="semicolon injected">;</span>
+    <span class="comment documentation">/// </span><span class="brace injected">}</span>
     <span class="comment documentation">/// ```</span>
     <span class="comment documentation">///</span>
     <span class="comment documentation">/// ```sh</span>
@@ -151,8 +151,8 @@
 <span class="brace">}</span>
 
 <span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected public">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="operator injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="none injected"> </span><span class="brace injected">}</span><span class="brace injected">}</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected public">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected macro">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">/// </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected public">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="operator injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="none injected"> </span><span class="brace injected">}</span><span class="brace injected">}</span>
+<span class="comment documentation">/// </span><span class="macro injected public">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected macro">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 <span class="comment documentation">/// ```</span>
 <span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">noop</span> <span class="brace">{</span>
     <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">&gt;</span> <span class="brace">{</span>
@@ -161,18 +161,18 @@
 <span class="brace">}</span>
 
 <span class="comment documentation">/// ```rust</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">///</span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 <span class="comment documentation">/// ```</span>
 <span class="comment documentation">///</span>
 <span class="comment documentation">/// ```</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">///</span><span class="none injected"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"false"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
 <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="attribute_bracket attribute">]</span>
 <span class="comment documentation">/// ```</span>
 <span class="comment documentation">///</span>
 <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```rust"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
 <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```ignore"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
-<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
+<span class="comment documentation">///</span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 <span class="comment documentation">/// ```</span>
 <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">mix_and_match</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
 
@@ -188,7 +188,7 @@
 <span class="comment documentation">/**
     Really, I don't get it
     ```rust
-</span><span class="comment documentation"> </span><span class="none injected">   </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
+    </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
     ```
     </span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
 */</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index ed55ac5..0381865 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -75,7 +75,10 @@ pub(crate) fn on_char_typed(
     // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
     // causing the editor to feel sluggish!
     let edition = Edition::CURRENT_FIXME;
-    let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
+    let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
+        db,
+        span::EditionedFileId::new(position.file_id, edition),
+    );
     let file = &db.parse(editioned_file_id_wrapper);
     let char_matches_position =
         file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
index fdc583a..76a2802 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -51,7 +51,7 @@
 // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
     let editioned_file_id_wrapper =
-        ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
+        ide_db::base_db::EditionedFileId::current_edition_guess_origin(db, position.file_id);
     let parse = db.parse(editioned_file_id_wrapper);
     let file = parse.tree();
     let token = file.syntax().token_at_offset(position.offset).left_biased()?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
index 2cd7514..e1a7e4e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -1,4 +1,4 @@
-use hir::{EditionedFileId, Semantics, db::DefDatabase};
+use hir::{Semantics, db::DefDatabase};
 use ide_db::{FileId, RootDatabase};
 
 // Feature: Debug ItemTree
@@ -10,8 +10,6 @@
 // | VS Code | **rust-analyzer: Debug ItemTree** |
 pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
     let sema = Semantics::new(db);
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
     db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
 }
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
index 9e262c3..b6e55b9 100644
--- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -18,6 +18,7 @@
 smallvec.workspace = true
 arrayvec.workspace = true
 ra-ap-rustc_lexer.workspace = true
+salsa.workspace = true
 bitflags.workspace = true
 
 # local deps
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 9e4b78c..ffe3bdd 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -2,7 +2,7 @@
 
 use intern::Symbol;
 use rustc_hash::FxHashMap;
-use span::{Edition, Span};
+use span::Span;
 use stdx::itertools::Itertools;
 use syntax::{
     AstNode,
@@ -44,16 +44,16 @@ fn benchmark_expand_macro_rules() {
     if skip_slow_tests() {
         return;
     }
+    let db = salsa::DatabaseImpl::default();
     let rules = macro_rules_fixtures();
-    let invocations = invocation_fixtures(&rules);
+    let invocations = invocation_fixtures(&db, &rules);
 
     let hash: usize = {
         let _pt = bench("mbe expand macro rules");
         invocations
             .into_iter()
             .map(|(id, tt)| {
-                let res =
-                    rules[&id].expand(&tt, |_| (), MacroCallStyle::FnLike, DUMMY, Edition::CURRENT);
+                let res = rules[&id].expand(&db, &tt, |_| (), MacroCallStyle::FnLike, DUMMY);
                 assert!(res.err.is_none());
                 res.value.0.0.len()
             })
@@ -93,6 +93,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
 
 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
+    db: &dyn salsa::Database,
     rules: &FxHashMap<String, DeclarativeMacro>,
 ) -> Vec<(String, tt::TopSubtree<Span>)> {
     let mut seed = 123456789;
@@ -124,10 +125,7 @@ fn invocation_fixtures(
                     }
                     let subtree = builder.build();
 
-                    if it
-                        .expand(&subtree, |_| (), MacroCallStyle::FnLike, DUMMY, Edition::CURRENT)
-                        .err
-                        .is_none()
+                    if it.expand(db, &subtree, |_| (), MacroCallStyle::FnLike, DUMMY).err.is_none()
                     {
                         res.push((name.clone(), subtree));
                         break;
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index 5074021..6510fef 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -7,7 +7,7 @@
 
 use intern::Symbol;
 use rustc_hash::FxHashMap;
-use span::{Edition, Span};
+use span::Span;
 
 use crate::{
     ExpandError, ExpandErrorKind, ExpandResult, MacroCallStyle, MatchedArmIndex,
@@ -15,12 +15,12 @@
 };
 
 pub(crate) fn expand_rules(
+    db: &dyn salsa::Database,
     rules: &[crate::Rule],
     input: &tt::TopSubtree<Span>,
     marker: impl Fn(&mut Span) + Copy,
     call_style: MacroCallStyle,
     call_site: Span,
-    def_site_edition: Edition,
 ) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
     let mut match_: Option<(matcher::Match<'_>, &crate::Rule, usize)> = None;
     for (idx, rule) in rules.iter().enumerate() {
@@ -29,7 +29,7 @@ pub(crate) fn expand_rules(
             continue;
         }
 
-        let new_match = matcher::match_(&rule.lhs, input, def_site_edition);
+        let new_match = matcher::match_(db, &rule.lhs, input);
 
         if new_match.err.is_none() {
             // If we find a rule that applies without errors, we're done.
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 189efcd..4da8b30 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -63,7 +63,7 @@
 
 use intern::{Symbol, sym};
 use smallvec::{SmallVec, smallvec};
-use span::{Edition, Span};
+use span::Span;
 use tt::{
     DelimSpan,
     iter::{TtElement, TtIter},
@@ -112,11 +112,11 @@ fn add_err(&mut self, err: ExpandError) {
 
 /// Matching errors are added to the `Match`.
 pub(super) fn match_<'t>(
+    db: &dyn salsa::Database,
     pattern: &'t MetaTemplate,
     input: &'t tt::TopSubtree<Span>,
-    edition: Edition,
 ) -> Match<'t> {
-    let mut res = match_loop(pattern, input, edition);
+    let mut res = match_loop(db, pattern, input);
     res.bound_count = count(res.bindings.bindings());
     return res;
 
@@ -365,6 +365,7 @@ struct MatchState<'t> {
 /// - `error_items`: the set of items in errors, used for error-resilient parsing
 #[inline]
 fn match_loop_inner<'t>(
+    db: &dyn salsa::Database,
     src: TtIter<'t, Span>,
     stack: &[TtIter<'t, Span>],
     res: &mut Match<'t>,
@@ -375,7 +376,6 @@ fn match_loop_inner<'t>(
     eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
     error_items: &mut SmallVec<[MatchState<'t>; 1]>,
     delim_span: tt::DelimSpan<Span>,
-    edition: Edition,
 ) {
     macro_rules! try_push {
         ($items: expr, $it:expr) => {
@@ -486,7 +486,7 @@ macro_rules! try_push {
             OpDelimited::Op(Op::Var { kind, name, .. }) => {
                 if let &Some(kind) = kind {
                     let mut fork = src.clone();
-                    let match_res = match_meta_var(kind, &mut fork, delim_span, edition);
+                    let match_res = match_meta_var(db, kind, &mut fork, delim_span);
                     match match_res.err {
                         None => {
                             // Some meta variables are optional (e.g. vis)
@@ -621,9 +621,9 @@ macro_rules! try_push {
 }
 
 fn match_loop<'t>(
+    db: &dyn salsa::Database,
     pattern: &'t MetaTemplate,
     src: &'t tt::TopSubtree<Span>,
-    edition: Edition,
 ) -> Match<'t> {
     let span = src.top_subtree().delimiter.delim_span();
     let mut src = src.iter();
@@ -655,6 +655,7 @@ fn match_loop<'t>(
         stdx::always!(next_items.is_empty());
 
         match_loop_inner(
+            db,
             src.clone(),
             &stack,
             &mut res,
@@ -665,7 +666,6 @@ fn match_loop<'t>(
             &mut eof_items,
             &mut error_items,
             span,
-            edition,
         );
         stdx::always!(cur_items.is_empty());
 
@@ -772,14 +772,14 @@ fn match_loop<'t>(
 }
 
 fn match_meta_var<'t>(
+    db: &dyn salsa::Database,
     kind: MetaVarKind,
     input: &mut TtIter<'t, Span>,
     delim_span: DelimSpan<Span>,
-    edition: Edition,
 ) -> ExpandResult<Fragment<'t>> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span)
+            return expect_fragment(db, input, parser::PrefixEntryPoint::Path, delim_span)
                 .map(Fragment::Path);
         }
         MetaVarKind::Expr(expr) => {
@@ -807,7 +807,7 @@ fn match_meta_var<'t>(
                 }
                 _ => {}
             };
-            return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span)
+            return expect_fragment(db, input, parser::PrefixEntryPoint::Expr, delim_span)
                 .map(Fragment::Expr);
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
@@ -853,7 +853,7 @@ fn match_meta_var<'t>(
         MetaVarKind::Item => parser::PrefixEntryPoint::Item,
         MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
     };
-    expect_fragment(input, fragment, edition, delim_span).map(Fragment::Tokens)
+    expect_fragment(db, input, fragment, delim_span).map(Fragment::Tokens)
 }
 
 fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 843c288..1193c42 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -275,13 +275,13 @@ pub fn rule_styles(&self) -> MacroCallStyles {
 
     pub fn expand(
         &self,
+        db: &dyn salsa::Database,
         tt: &tt::TopSubtree<Span>,
         marker: impl Fn(&mut Span) + Copy,
         call_style: MacroCallStyle,
         call_site: Span,
-        def_site_edition: Edition,
     ) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
-        expander::expand_rules(&self.rules, tt, marker, call_style, call_site, def_site_edition)
+        expander::expand_rules(db, &self.rules, tt, marker, call_style, call_site)
     }
 }
 
@@ -390,16 +390,15 @@ fn from(result: Result<T, E>) -> Self {
 }
 
 pub fn expect_fragment<'t>(
+    db: &dyn salsa::Database,
     tt_iter: &mut TtIter<'t, Span>,
     entry_point: ::parser::PrefixEntryPoint,
-    edition: ::parser::Edition,
     delim_span: DelimSpan<Span>,
 ) -> ExpandResult<tt::TokenTreesView<'t, Span>> {
     use ::parser;
     let buffer = tt_iter.remaining();
-    // FIXME: Pass the correct edition per token. Due to the split between mbe and hir-expand it's complicated.
-    let parser_input = to_parser_input(buffer, &mut |_ctx| edition);
-    let tree_traversal = entry_point.parse(&parser_input, edition);
+    let parser_input = to_parser_input(buffer, &mut |ctx| ctx.edition(db));
+    let tree_traversal = entry_point.parse(&parser_input);
     let mut cursor = buffer.cursor();
     let mut error = false;
     for step in tree_traversal.iter() {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/tests.rs
index 110a266..4a1af31 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tests.rs
@@ -22,6 +22,7 @@ fn check_(
     expect: expect_test::Expect,
     parse: parser::TopEntryPoint,
 ) {
+    let db = salsa::DatabaseImpl::default();
     let decl_tt = &syntax_bridge::parse_to_token_tree(
         def_edition,
         SpanAnchor {
@@ -49,6 +50,7 @@ fn check_(
     )
     .unwrap();
     let res = mac.expand(
+        &db,
         &arg_tt,
         |_| (),
         crate::MacroCallStyle::FnLike,
@@ -57,7 +59,6 @@ fn check_(
             anchor: call_anchor,
             ctx: SyntaxContext::root(Edition::CURRENT),
         },
-        def_edition,
     );
     let mut expect_res = String::new();
     if let Some(err) = res.err {
@@ -66,12 +67,8 @@ fn check_(
     if render_debug {
         format_to!(expect_res, "{:#?}\n\n", res.value.0);
     }
-    let (node, _) = syntax_bridge::token_tree_to_syntax_node(
-        &res.value.0,
-        parse,
-        &mut |_| def_edition,
-        def_edition,
-    );
+    let (node, _) =
+        syntax_bridge::token_tree_to_syntax_node(&res.value.0, parse, &mut |_| def_edition);
     format_to!(
         expect_res,
         "{}",
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
index b9d5bff..8f74acd1 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
@@ -59,7 +59,7 @@ pub(crate) fn generic_arg(p: &mut Parser<'_>) -> bool {
 
         // test edition_2015_dyn_prefix_inside_generic_arg 2015
         // type A = Foo<dyn T>;
-        T![ident] if !p.edition().at_least_2018() && types::is_dyn_weak(p) => type_arg(p),
+        T![ident] if !p.current_edition().at_least_2018() && types::is_dyn_weak(p) => type_arg(p),
         // test macro_inside_generic_arg
         // type A = Foo<syn::Token![_]>;
         k if PATH_NAME_REF_KINDS.contains(k) => {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
index a7e97c5..c62356d 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs
@@ -54,7 +54,9 @@ fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) {
         T![dyn] => dyn_trait_type(p),
         // Some path types are not allowed to have bounds (no plus)
         T![<] => path_type_bounds(p, allow_bounds),
-        T![ident] if !p.edition().at_least_2018() && is_dyn_weak(p) => dyn_trait_type_weak(p),
+        T![ident] if !p.current_edition().at_least_2018() && is_dyn_weak(p) => {
+            dyn_trait_type_weak(p)
+        }
         _ if paths::is_path_start(p) => path_or_macro_type(p, allow_bounds),
         LIFETIME_IDENT if p.nth_at(1, T![+]) => bare_dyn_trait_type(p),
         _ => {
diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs
index 331bc58..57eeb43 100644
--- a/src/tools/rust-analyzer/crates/parser/src/input.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/input.rs
@@ -1,5 +1,7 @@
 //! See [`Input`].
 
+use edition::Edition;
+
 use crate::SyntaxKind;
 
 #[allow(non_camel_case_types)]
@@ -16,6 +18,7 @@ pub struct Input {
     kind: Vec<SyntaxKind>,
     joint: Vec<bits>,
     contextual_kind: Vec<SyntaxKind>,
+    edition: Vec<Edition>,
 }
 
 /// `pub` impl used by callers to create `Tokens`.
@@ -26,15 +29,16 @@ pub fn with_capacity(capacity: usize) -> Self {
             kind: Vec::with_capacity(capacity),
             joint: Vec::with_capacity(capacity / size_of::<bits>()),
             contextual_kind: Vec::with_capacity(capacity),
+            edition: Vec::with_capacity(capacity),
         }
     }
     #[inline]
-    pub fn push(&mut self, kind: SyntaxKind) {
-        self.push_impl(kind, SyntaxKind::EOF)
+    pub fn push(&mut self, kind: SyntaxKind, edition: Edition) {
+        self.push_impl(kind, SyntaxKind::EOF, edition)
     }
     #[inline]
-    pub fn push_ident(&mut self, contextual_kind: SyntaxKind) {
-        self.push_impl(SyntaxKind::IDENT, contextual_kind)
+    pub fn push_ident(&mut self, contextual_kind: SyntaxKind, edition: Edition) {
+        self.push_impl(SyntaxKind::IDENT, contextual_kind, edition)
     }
     /// Sets jointness for the last token we've pushed.
     ///
@@ -59,13 +63,14 @@ pub fn was_joint(&mut self) {
         self.joint[idx] |= 1 << b_idx;
     }
     #[inline]
-    fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind) {
+    fn push_impl(&mut self, kind: SyntaxKind, contextual_kind: SyntaxKind, edition: Edition) {
         let idx = self.len();
         if idx.is_multiple_of(bits::BITS as usize) {
             self.joint.push(0);
         }
         self.kind.push(kind);
         self.contextual_kind.push(contextual_kind);
+        self.edition.push(edition);
     }
 }
 
@@ -77,6 +82,9 @@ pub(crate) fn kind(&self, idx: usize) -> SyntaxKind {
     pub(crate) fn contextual_kind(&self, idx: usize) -> SyntaxKind {
         self.contextual_kind.get(idx).copied().unwrap_or(SyntaxKind::EOF)
     }
+    pub(crate) fn edition(&self, idx: usize) -> Edition {
+        self.edition[idx]
+    }
     pub(crate) fn is_joint(&self, n: usize) -> bool {
         let (idx, b_idx) = self.bit_index(n);
         self.joint[idx] & (1 << b_idx) != 0
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index 53444ef..3dd2c1f 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -88,7 +88,7 @@ pub enum TopEntryPoint {
 }
 
 impl TopEntryPoint {
-    pub fn parse(&self, input: &Input, edition: Edition) -> Output {
+    pub fn parse(&self, input: &Input) -> Output {
         let _p = tracing::info_span!("TopEntryPoint::parse", ?self).entered();
         let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
             TopEntryPoint::SourceFile => grammar::entry::top::source_file,
@@ -99,7 +99,7 @@ pub fn parse(&self, input: &Input, edition: Edition) -> Output {
             TopEntryPoint::Expr => grammar::entry::top::expr,
             TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
         };
-        let mut p = parser::Parser::new(input, edition);
+        let mut p = parser::Parser::new(input);
         entry_point(&mut p);
         let events = p.finish();
         let res = event::process(events);
@@ -151,7 +151,7 @@ pub enum PrefixEntryPoint {
 }
 
 impl PrefixEntryPoint {
-    pub fn parse(&self, input: &Input, edition: Edition) -> Output {
+    pub fn parse(&self, input: &Input) -> Output {
         let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
             PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
             PrefixEntryPoint::Block => grammar::entry::prefix::block,
@@ -164,7 +164,7 @@ pub fn parse(&self, input: &Input, edition: Edition) -> Output {
             PrefixEntryPoint::Item => grammar::entry::prefix::item,
             PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
         };
-        let mut p = parser::Parser::new(input, edition);
+        let mut p = parser::Parser::new(input);
         entry_point(&mut p);
         let events = p.finish();
         event::process(events)
@@ -188,9 +188,9 @@ pub fn for_node(
     ///
     /// Tokens must start with `{`, end with `}` and form a valid brace
     /// sequence.
-    pub fn parse(self, tokens: &Input, edition: Edition) -> Output {
+    pub fn parse(self, tokens: &Input) -> Output {
         let Reparser(r) = self;
-        let mut p = parser::Parser::new(tokens, edition);
+        let mut p = parser::Parser::new(tokens);
         r(&mut p);
         let events = p.finish();
         event::process(events)
diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs
index ca02d9f..c41bd59 100644
--- a/src/tools/rust-analyzer/crates/parser/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs
@@ -26,14 +26,13 @@ pub(crate) struct Parser<'t> {
     pos: usize,
     events: Vec<Event>,
     steps: Cell<u32>,
-    edition: Edition,
 }
 
 const PARSER_STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15_000_000 };
 
 impl<'t> Parser<'t> {
-    pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
-        Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), edition }
+    pub(super) fn new(inp: &'t Input) -> Parser<'t> {
+        Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
     }
 
     pub(crate) fn finish(self) -> Vec<Event> {
@@ -291,8 +290,8 @@ fn push_event(&mut self, event: Event) {
         self.events.push(event);
     }
 
-    pub(crate) fn edition(&self) -> Edition {
-        self.edition
+    pub(crate) fn current_edition(&self) -> Edition {
+        self.inp.edition(self.pos)
     }
 }
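The net effect of the `Input` and `Parser` changes above is that the edition is now tracked per token instead of once per parse, so `current_edition()` reports the edition of the token under the cursor. A simplified, self-contained sketch of that idea (toy types for illustration, not the actual rust-analyzer ones):

```rust
// Each token records the edition it originated from; the parser asks for
// the edition at its *current* position rather than carrying one edition
// for the whole input.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Edition {
    Edition2015,
    Edition2021,
}

#[derive(Clone, Copy)]
enum SyntaxKind {
    Ident,
    Comma,
}

#[derive(Default)]
struct Input {
    kind: Vec<SyntaxKind>,
    edition: Vec<Edition>, // parallel to `kind`, one entry per token
}

impl Input {
    fn push(&mut self, kind: SyntaxKind, edition: Edition) {
        self.kind.push(kind);
        self.edition.push(edition);
    }
    fn edition(&self, idx: usize) -> Edition {
        self.edition[idx]
    }
}

struct Parser<'t> {
    inp: &'t Input,
    pos: usize,
}

impl Parser<'_> {
    // Mirrors `Parser::current_edition`: the edition of the current token.
    fn current_edition(&self) -> Edition {
        self.inp.edition(self.pos)
    }
}

fn main() {
    let mut inp = Input::default();
    // Tokens coming from a 2015-edition macro definition can sit next to
    // tokens from a 2021-edition call site in the same input.
    inp.push(SyntaxKind::Ident, Edition::Edition2015);
    inp.push(SyntaxKind::Comma, Edition::Edition2021);
    assert!(matches!(inp.kind[1], SyntaxKind::Comma));

    let p = Parser { inp: &inp, pos: 0 };
    assert_eq!(p.current_edition(), Edition::Edition2015);
}
```

This is what lets a call site and a macro definition with different editions share one token stream, which the removed "pass the correct edition per token" FIXME in `mbe` previously worked around with a single edition.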
 
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index d5e5139..3c19e02 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -38,12 +38,13 @@ pub fn to_input(&self, edition: Edition) -> crate::Input {
                 res.push_ident(
                     SyntaxKind::from_contextual_keyword(token_text, edition)
                         .unwrap_or(SyntaxKind::IDENT),
+                    edition,
                 )
             } else {
                 if was_joint {
                     res.was_joint();
                 }
-                res.push(kind);
+                res.push(kind, edition);
                 // Tag the token as joint if it is float with a fractional part
                 // we use this jointness to inform the parser about what token split
                 // event to emit when we encounter a float literal in a field access
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs
index 4b19ddc..cec50aa 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs
@@ -80,7 +80,7 @@ fn parse_err() {
 fn parse(entry: TopEntryPoint, text: &str, edition: Edition) -> (String, bool) {
     let lexed = LexedStr::new(edition, text);
     let input = lexed.to_input(edition);
-    let output = entry.parse(&input, edition);
+    let output = entry.parse(&input);
 
     let mut buf = String::new();
     let mut errors = Vec::new();
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
index e2268ee..9d93a2a 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -86,7 +86,7 @@ fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
     let input = lexed.to_input(Edition::CURRENT);
 
     let mut n_tokens = 0;
-    for step in entry.parse(&input, Edition::CURRENT).iter() {
+    for step in entry.parse(&input).iter() {
         match step {
             Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
             Step::FloatSplit { .. } => n_tokens += 1,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
index b0e80de..6803722 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -13,6 +13,13 @@
 /// Represents requests sent from the client to the proc-macro-srv.
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Request {
+    // IMPORTANT: Keep this first, otherwise postcard will break, as it's not a self-describing format.
+    // As such, this is the only request that needs to be supported across all protocol versions,
+    // and by keeping it first, we ensure it always has the same discriminant encoding in postcard.
+    /// Performs an API version check between the client and the server.
+    /// Since [`VERSION_CHECK_VERSION`]
+    ApiVersionCheck {},
+
     /// Retrieves a list of macros from a given dynamic library.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ListMacros { dylib_path: Utf8PathBuf },
@@ -21,10 +28,6 @@ pub enum Request {
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ExpandMacro(Box<ExpandMacro>),
 
-    /// Performs an API version check between the client and the server.
-    /// Since [`VERSION_CHECK_VERSION`]
-    ApiVersionCheck {},
-
     /// Sets server-specific configurations.
     /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
     SetConfig(ServerConfig),
@@ -44,6 +47,13 @@ pub enum SpanMode {
 /// Represents responses sent from the proc-macro-srv to the client.
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Response {
+    // IMPORTANT: Keep this first, otherwise postcard will break, as it's not a self-describing format.
+    // As such, this is the only response that needs to be supported across all protocol versions,
+    // and by keeping it first, we ensure it always has the same discriminant encoding in postcard.
+    /// Returns the API version supported by the server.
+    /// Since [`NO_VERSION_CHECK_VERSION`]
+    ApiVersionCheck(u32),
+
     /// Returns a list of available macros in a dynamic library.
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
@@ -52,10 +62,6 @@ pub enum Response {
     /// Since [`NO_VERSION_CHECK_VERSION`]
     ExpandMacro(Result<FlatTree, PanicMessage>),
 
-    /// Returns the API version supported by the server.
-    /// Since [`NO_VERSION_CHECK_VERSION`]
-    ApiVersionCheck(u32),
-
     /// Confirms the application of a configuration update.
     /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
     SetConfig(ServerConfig),
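For illustration only (not part of this patch), a minimal sketch of why the variant order matters. It assumes `serde` with the `derive` feature and `postcard` with its `alloc` feature; `RequestV1`/`RequestV2` are made-up stand-ins for two protocol versions:

```rust
// postcard is not self-describing: an enum variant is written on the wire
// as its declaration index (a varint). Two protocol versions only agree on
// `ApiVersionCheck` if it keeps the same index (0) in both, which is why it
// must stay the first variant.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
enum RequestV1 {
    ApiVersionCheck {},                // index 0
    ListMacros { dylib_path: String }, // index 1
}

#[derive(Serialize, Deserialize)]
enum RequestV2 {
    ApiVersionCheck {},                // still index 0
    ListMacros { dylib_path: String },
    SetConfig { flag: bool },          // new variants are appended after it
}

fn main() {
    // Encoded as the single byte 0x00: the variant index, with no fields.
    let bytes = postcard::to_allocvec(&RequestV1::ApiVersionCheck {}).unwrap();
    // A peer speaking the newer protocol decodes the same bytes, because
    // index 0 still means "ApiVersionCheck" on both sides.
    let decoded: RequestV2 = postcard::from_bytes(&bytes).unwrap();
    assert!(matches!(decoded, RequestV2::ApiVersionCheck {}));
}
```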
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
index d22e3f1..92e9038 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
@@ -216,16 +216,7 @@ pub fn from_tokenstream(
             text: Vec::new(),
             version,
         };
-        let group = proc_macro_srv::Group {
-            delimiter: proc_macro_srv::Delimiter::None,
-            stream: Some(tokenstream),
-            span: proc_macro_srv::DelimSpan {
-                open: call_site,
-                close: call_site,
-                entire: call_site,
-            },
-        };
-        w.write_tokenstream(&group);
+        w.write_tokenstream(call_site, &tokenstream);
 
         FlatTree {
             subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@@ -267,16 +258,7 @@ pub fn from_tokenstream_raw<T: SpanTransformer<Table = ()>>(
             text: Vec::new(),
             version,
         };
-        let group = proc_macro_srv::Group {
-            delimiter: proc_macro_srv::Delimiter::None,
-            stream: Some(tokenstream),
-            span: proc_macro_srv::DelimSpan {
-                open: call_site,
-                close: call_site,
-                entire: call_site,
-            },
-        };
-        w.write_tokenstream(&group);
+        w.write_tokenstream(call_site, &tokenstream);
 
         FlatTree {
             subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@@ -491,7 +473,7 @@ fn span_for_token_id(table: &Self::Table, id: SpanId) -> Self::Span {
 }
 
 struct Writer<'a, 'span, S: SpanTransformer, W> {
-    work: VecDeque<(usize, W)>,
+    work: VecDeque<(usize, usize, W)>,
     string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>,
     span_data_table: &'span mut S::Table,
     version: u32,
@@ -508,14 +490,13 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
     fn write_subtree(&mut self, root: tt::SubtreeView<'a, T::Span>) {
         let subtree = root.top_subtree();
         self.enqueue(subtree, root.iter());
-        while let Some((idx, subtree)) = self.work.pop_front() {
-            self.subtree(idx, subtree);
+        while let Some((idx, len, subtree)) = self.work.pop_front() {
+            self.subtree(idx, len, subtree);
         }
     }
 
-    fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, T::Span>) {
+    fn subtree(&mut self, idx: usize, n_tt: usize, subtree: tt::iter::TtIter<'a, T::Span>) {
         let mut first_tt = self.token_tree.len();
-        let n_tt = subtree.clone().count(); // FIXME: `count()` walks over the entire iterator.
         self.token_tree.resize(first_tt + n_tt, !0);
 
         self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
@@ -594,7 +575,8 @@ fn enqueue(
         let close = self.token_id_of(subtree.delimiter.close);
         let delimiter_kind = subtree.delimiter.kind;
         self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
-        self.work.push_back((idx, contents));
+        // FIXME: `count()` walks over the entire iterator.
+        self.work.push_back((idx, contents.clone().count(), contents));
         idx as u32
     }
 }
@@ -624,26 +606,43 @@ pub(crate) fn intern_owned(&mut self, text: String) -> u32 {
 }
 
 #[cfg(feature = "sysroot-abi")]
-impl<'a, T: SpanTransformer> Writer<'a, '_, T, &'a proc_macro_srv::Group<T::Span>> {
-    fn write_tokenstream(&mut self, root: &'a proc_macro_srv::Group<T::Span>) {
-        self.enqueue_group(root);
+impl<'a, T: SpanTransformer>
+    Writer<'a, '_, T, Option<proc_macro_srv::TokenStreamIter<'a, T::Span>>>
+{
+    fn write_tokenstream(
+        &mut self,
+        call_site: T::Span,
+        root: &'a proc_macro_srv::TokenStream<T::Span>,
+    ) {
+        let call_site = self.token_id_of(call_site);
+        self.subtree.push(SubtreeRepr {
+            open: call_site,
+            close: call_site,
+            kind: tt::DelimiterKind::Invisible,
+            tt: [!0, !0],
+        });
+        self.work.push_back((0, root.len(), Some(root.iter())));
 
-        while let Some((idx, group)) = self.work.pop_front() {
-            self.group(idx, group);
+        while let Some((idx, len, group)) = self.work.pop_front() {
+            self.group(idx, len, group);
         }
     }
 
-    fn group(&mut self, idx: usize, group: &'a proc_macro_srv::Group<T::Span>) {
+    fn group(
+        &mut self,
+        idx: usize,
+        n_tt: usize,
+        group: Option<proc_macro_srv::TokenStreamIter<'a, T::Span>>,
+    ) {
         let mut first_tt = self.token_tree.len();
-        let n_tt = group.stream.as_ref().map_or(0, |it| it.len());
         self.token_tree.resize(first_tt + n_tt, !0);
 
         self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
 
-        for tt in group.stream.iter().flat_map(|it| it.iter()) {
+        for tt in group.into_iter().flatten() {
             let idx_tag = match tt {
                 proc_macro_srv::TokenTree::Group(group) => {
-                    let idx = self.enqueue_group(group);
+                    let idx = self.enqueue(group);
                     idx << 2
                 }
                 proc_macro_srv::TokenTree::Literal(lit) => {
@@ -706,7 +705,7 @@ fn group(&mut self, idx: usize, group: &'a proc_macro_srv::Group<T::Span>) {
         }
     }
 
-    fn enqueue_group(&mut self, group: &'a proc_macro_srv::Group<T::Span>) -> u32 {
+    fn enqueue(&mut self, group: &'a proc_macro_srv::Group<T::Span>) -> u32 {
         let idx = self.subtree.len();
         let open = self.token_id_of(group.span.open);
         let close = self.token_id_of(group.span.close);
@@ -717,7 +716,11 @@ fn enqueue_group(&mut self, group: &'a proc_macro_srv::Group<T::Span>) -> u32 {
             proc_macro_srv::Delimiter::None => tt::DelimiterKind::Invisible,
         };
         self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
-        self.work.push_back((idx, group));
+        self.work.push_back((
+            idx,
+            group.stream.as_ref().map_or(0, |stream| stream.len()),
+            group.stream.as_ref().map(|ts| ts.iter()),
+        ));
         idx as u32
     }
 }
@@ -959,9 +962,6 @@ pub(crate) fn read_tokenstream(
             };
             res[i] = Some(g);
         }
-        // FIXME: double check this
-        proc_macro_srv::TokenStream::new(vec![proc_macro_srv::TokenTree::Group(
-            res[0].take().unwrap(),
-        )])
+        res[0].take().unwrap().stream.unwrap_or_default()
     }
 }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
index a96cf2b..c3838a8 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -56,7 +56,7 @@
 
 pub use crate::bridge::*;
 pub use crate::server_impl::literal_from_str;
-pub use crate::token_stream::{TokenStream, literal_to_string};
+pub use crate::token_stream::{TokenStream, TokenStreamIter, literal_to_string};
 
 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
 pub enum ProcMacroKind {
@@ -198,7 +198,13 @@ impl ProcMacroSrvSpan for SpanId {
     type Server = server_impl::token_id::SpanIdServer;
 
     fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
-        Self::Server { call_site, def_site, mixed_site }
+        Self::Server {
+            call_site,
+            def_site,
+            mixed_site,
+            tracked_env_vars: Default::default(),
+            tracked_paths: Default::default(),
+        }
     }
 }
 impl ProcMacroSrvSpan for Span {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs
index 3814320..5ac263b 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -1,6 +1,9 @@
 //! proc-macro server backend based on [`proc_macro_api::msg::SpanId`] as the backing span.
 //! This backend is rather inflexible, used by RustRover and older rust-analyzer versions.
-use std::ops::{Bound, Range};
+use std::{
+    collections::{HashMap, HashSet},
+    ops::{Bound, Range},
+};
 
 use intern::Symbol;
 use proc_macro::bridge::server;
@@ -24,6 +27,10 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 pub struct FreeFunctions;
 
 pub struct SpanIdServer {
+    // FIXME: Report this back to the caller to track as dependencies
+    pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>,
+    // FIXME: Report this back to the caller to track as dependencies
+    pub tracked_paths: HashSet<Box<str>>,
     pub call_site: Span,
     pub def_site: Span,
     pub mixed_site: Span,
@@ -40,8 +47,13 @@ impl server::FreeFunctions for SpanIdServer {
     fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> {
         None
     }
-    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {}
-    fn track_path(&mut self, _path: &str) {}
+    fn track_env_var(&mut self, var: &str, value: Option<&str>) {
+        self.tracked_env_vars.insert(var.into(), value.map(Into::into));
+    }
+    fn track_path(&mut self, path: &str) {
+        self.tracked_paths.insert(path.into());
+    }
+
     fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
         literal_from_str(s, self.call_site)
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
index ad3d9ee..20507a6 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -297,26 +297,38 @@ fn test_fn_like_macro_noop() {
 fn test_fn_like_macro_clone_ident_subtree() {
     assert_expand(
         "fn_like_clone_tokens",
-        r#"ident, []"#,
+        r#"ident, [ident2, ident3]"#,
         expect![[r#"
             IDENT 1 ident
             PUNCT 1 , [alone]
             GROUP [] 1 1 1
+              IDENT 1 ident2
+              PUNCT 1 , [alone]
+              IDENT 1 ident3
 
 
             IDENT 1 ident
             PUNCT 1 , [alone]
             GROUP [] 1 1 1
+              IDENT 1 ident2
+              PUNCT 1 , [alone]
+              IDENT 1 ident3
         "#]],
         expect![[r#"
             IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
             PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
-            GROUP [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
+            GROUP [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@22..23#ROOT2024 42:Root[0000, 0]@7..23#ROOT2024
+              IDENT 42:Root[0000, 0]@8..14#ROOT2024 ident2
+              PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+              IDENT 42:Root[0000, 0]@16..22#ROOT2024 ident3
 
 
             IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
             PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
-            GROUP [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
+            GROUP [] 42:Root[0000, 0]@7..23#ROOT2024 42:Root[0000, 0]@7..23#ROOT2024 42:Root[0000, 0]@7..23#ROOT2024
+              IDENT 42:Root[0000, 0]@8..14#ROOT2024 ident2
+              PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+              IDENT 42:Root[0000, 0]@16..22#ROOT2024 ident3
         "#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 59a4de9..767672f 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -10,8 +10,8 @@
 
 use cfg::{CfgAtom, CfgDiff};
 use hir::{
-    Adt, AssocItem, Crate, DefWithBody, FindPathConfig, HasCrate, HasSource, HirDisplay, ModuleDef,
-    Name, crate_lang_items,
+    Adt, AssocItem, Crate, DefWithBody, FindPathConfig, HasSource, HirDisplay, ModuleDef, Name,
+    crate_lang_items,
     db::{DefDatabase, ExpandDatabase, HirDatabase},
     next_solver::{DbInterner, GenericArgs},
 };
@@ -20,6 +20,7 @@
     expr_store::BodySourceMap,
     hir::{ExprId, PatId},
 };
+use hir_ty::InferenceResult;
 use ide::{
     Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
     InlayHintsConfig, LineCol, RootDatabase,
@@ -145,7 +146,9 @@ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
                     if !source_root.is_library || self.with_deps {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
+                            .file_item_tree(
+                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
+                            )
                             .item_tree_stats()
                             .into();
 
@@ -155,7 +158,9 @@ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
                     } else {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
+                            .file_item_tree(
+                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
+                            )
                             .item_tree_stats()
                             .into();
 
@@ -374,7 +379,7 @@ fn run_data_layout(&self, db: &RootDatabase, adts: &[hir::Adt], verbosity: Verbo
         let mut all = 0;
         let mut fail = 0;
         for &a in adts {
-            let interner = DbInterner::new_with(db, Some(a.krate(db).base()), None);
+            let interner = DbInterner::new_no_crate(db);
             let generic_params = db.generic_params(a.into());
             if generic_params.iter_type_or_consts().next().is_some()
                 || generic_params.iter_lt().next().is_some()
@@ -741,7 +746,7 @@ fn run_inference(
                 .par_iter()
                 .map_with(db.clone(), |snap, &body| {
                     snap.body(body.into());
-                    snap.infer(body.into());
+                    InferenceResult::for_body(snap, body.into());
                 })
                 .count();
             eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
@@ -798,7 +803,8 @@ fn run_inference(
             }
             bar.set_message(msg);
             let body = db.body(body_id.into());
-            let inference_result = catch_unwind(AssertUnwindSafe(|| db.infer(body_id.into())));
+            let inference_result =
+                catch_unwind(AssertUnwindSafe(|| InferenceResult::for_body(db, body_id.into())));
             let inference_result = match inference_result {
                 Ok(inference_result) => inference_result,
                 Err(p) => {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 37f83f6..fbf3082 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -189,6 +189,13 @@ pub fn run(self) -> anyhow::Result<()> {
                     symbol_roles |= scip_types::SymbolRole::Definition as i32;
                 }
 
+                let enclosing_range = match token.definition_body {
+                    Some(def_body) if def_body.file_id == file_id => {
+                        text_range_to_scip_range(&line_index, def_body.range)
+                    }
+                    _ => Vec::new(),
+                };
+
                 occurrences.push(scip_types::Occurrence {
                     range: text_range_to_scip_range(&line_index, text_range),
                     symbol,
@@ -197,7 +204,7 @@ pub fn run(self) -> anyhow::Result<()> {
                     syntax_kind: Default::default(),
                     diagnostics: Vec::new(),
                     special_fields: Default::default(),
-                    enclosing_range: Vec::new(),
+                    enclosing_range,
                 });
             }
 
@@ -508,18 +515,19 @@ fn moniker_descriptors(identifier: &MonikerIdentifier) -> Vec<scip_types::Descri
 #[cfg(test)]
 mod test {
     use super::*;
+    use hir::FileRangeWrapper;
     use ide::{FilePosition, TextSize};
     use test_fixture::ChangeFixture;
     use vfs::VfsPath;
 
     fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
+        let change_fixture = ChangeFixture::parse(ra_fixture);
         host.raw_database_mut().apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ()");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
+        let position = FilePosition { file_id: file_id.file_id(), offset };
         (host, position)
     }
 
@@ -870,7 +878,7 @@ fn documentation_matches_doc_comment() {
         let s = "/// foo\nfn bar() {}";
 
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(host.raw_database(), s);
+        let change_fixture = ChangeFixture::parse(s);
         host.raw_database_mut().apply_change(change_fixture.change);
 
         let analysis = host.analysis();
@@ -887,4 +895,32 @@ fn documentation_matches_doc_comment() {
 
         assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo"));
     }
+
+    #[test]
+    fn function_has_enclosing_range() {
+        let s = "fn foo() {}";
+
+        let mut host = AnalysisHost::default();
+        let change_fixture = ChangeFixture::parse(s);
+        host.raw_database_mut().apply_change(change_fixture.change);
+
+        let analysis = host.analysis();
+        let si = StaticIndex::compute(
+            &analysis,
+            VendoredLibrariesConfig::Included {
+                workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
+            },
+        );
+
+        let file = si.files.first().unwrap();
+        let (_, token_id) = file.tokens.get(1).unwrap(); // first token is file module, second is `foo`
+        let token = si.tokens.get(*token_id).unwrap();
+
+        let expected_range = FileRangeWrapper {
+            file_id: FileId::from_raw(0),
+            range: TextRange::new(0.into(), 11.into()),
+        };
+
+        assert_eq!(token.definition_body, Some(expected_range));
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index 975e81a..529cf12 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -72,7 +72,7 @@ pub fn run(self) -> anyhow::Result<()> {
                 let sr = db.source_root(root).source_root(db);
                 for file_id in sr.iter() {
                     for debug_info in match_finder.debug_where_text_equal(
-                        EditionedFileId::current_edition(db, file_id),
+                        EditionedFileId::current_edition_guess_origin(db, file_id),
                         debug_snippet,
                     ) {
                         println!("{debug_info:#?}");
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
index 0362e13..a400f86 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -1,9 +1,7 @@
 //! Reports references in code that the IDE layer cannot resolve.
 use hir::{AnyDiagnostic, Crate, Module, Semantics, db::HirDatabase, sym};
 use ide::{AnalysisHost, RootDatabase, TextRange};
-use ide_db::{
-    EditionedFileId, FxHashSet, LineIndexDatabase as _, base_db::SourceDatabase, defs::NameRefClass,
-};
+use ide_db::{FxHashSet, LineIndexDatabase as _, base_db::SourceDatabase, defs::NameRefClass};
 use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at};
 use parser::SyntaxKind;
 use syntax::{AstNode, WalkEvent, ast};
@@ -139,9 +137,7 @@ fn all_unresolved_references(
     sema: &Semantics<'_, RootDatabase>,
     file_id: FileId,
 ) -> Vec<TextRange> {
-    let file_id = sema
-        .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+    let file_id = sema.attach_first_edition(file_id);
     let file = sema.parse(file_id);
     let root = file.syntax();
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
index 674e862..4105527 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs
@@ -15,33 +15,36 @@
 use process_wrap::std::{StdChildWrapper, StdCommandWrap};
 use stdx::process::streaming_output;
 
-/// Cargo output is structured as one JSON per line. This trait abstracts parsing one line of
-/// cargo output into a Rust data type
-pub(crate) trait CargoParser<T>: Send + 'static {
+/// This trait abstracts parsing one line of JSON output into a Rust
+/// data type.
+///
+/// This is useful for `cargo check` output, `cargo test` output, as
+/// well as custom discover commands.
+pub(crate) trait JsonLinesParser<T>: Send + 'static {
     fn from_line(&self, line: &str, error: &mut String) -> Option<T>;
     fn from_eof(&self) -> Option<T>;
 }
 
-struct CargoActor<T> {
-    parser: Box<dyn CargoParser<T>>,
+struct CommandActor<T> {
+    parser: Box<dyn JsonLinesParser<T>>,
     sender: Sender<T>,
     stdout: ChildStdout,
     stderr: ChildStderr,
 }
 
-impl<T: Sized + Send + 'static> CargoActor<T> {
+impl<T: Sized + Send + 'static> CommandActor<T> {
     fn new(
-        parser: impl CargoParser<T>,
+        parser: impl JsonLinesParser<T>,
         sender: Sender<T>,
         stdout: ChildStdout,
         stderr: ChildStderr,
     ) -> Self {
         let parser = Box::new(parser);
-        CargoActor { parser, sender, stdout, stderr }
+        CommandActor { parser, sender, stdout, stderr }
     }
 }
 
-impl<T: Sized + Send + 'static> CargoActor<T> {
+impl<T: Sized + Send + 'static> CommandActor<T> {
     fn run(self, outfile: Option<Utf8PathBuf>) -> io::Result<(bool, String)> {
         // We manually read a line at a time, instead of using serde's
         // stream deserializers, because the deserializer cannot recover
@@ -113,6 +116,9 @@ fn run(self, outfile: Option<Utf8PathBuf>) -> io::Result<(bool, String)> {
     }
 }
 
+/// 'Join On Drop' wrapper for a child process.
+///
+/// This wrapper kills the process when the wrapper is dropped.
 struct JodGroupChild(Box<dyn StdChildWrapper>);
 
 impl Drop for JodGroupChild {
@@ -122,9 +128,9 @@ fn drop(&mut self) {
     }
 }
 
-/// A handle to a cargo process used for fly-checking.
+/// A handle to a shell command, such as cargo for diagnostics (flycheck).
 pub(crate) struct CommandHandle<T> {
-    /// The handle to the actual cargo process. As we cannot cancel directly from with
+    /// The handle to the actual child process. As we cannot cancel directly from within
     /// a read syscall, dropping and therefore terminating the process is our best option.
     child: JodGroupChild,
     thread: stdx::thread::JoinHandle<io::Result<(bool, String)>>,
@@ -147,7 +153,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 impl<T: Sized + Send + 'static> CommandHandle<T> {
     pub(crate) fn spawn(
         mut command: Command,
-        parser: impl CargoParser<T>,
+        parser: impl JsonLinesParser<T>,
         sender: Sender<T>,
         out_file: Option<Utf8PathBuf>,
     ) -> std::io::Result<Self> {
@@ -167,7 +173,7 @@ pub(crate) fn spawn(
         let stdout = child.0.stdout().take().unwrap();
         let stderr = child.0.stderr().take().unwrap();
 
-        let actor = CargoActor::<T>::new(parser, sender, stdout, stderr);
+        let actor = CommandActor::<T>::new(parser, sender, stdout, stderr);
         let thread =
             stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "CommandHandle")
                 .spawn(move || actor.run(out_file))
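As a rough sketch of what an implementor of the renamed trait can look like (hypothetical `ProgressMessage`/`ProgressParser` types, assumed to live in the same crate as the crate-private `JsonLinesParser`, mirroring `DiscoverProjectParser` below):

```rust
use serde::Deserialize;

// Made-up message type, only to show the trait's shape.
#[derive(Debug, Deserialize)]
struct ProgressMessage {
    message: String,
}

struct ProgressParser;

impl JsonLinesParser<ProgressMessage> for ProgressParser {
    fn from_line(&self, line: &str, error: &mut String) -> Option<ProgressMessage> {
        match serde_json::from_str::<ProgressMessage>(line) {
            Ok(msg) => Some(msg),
            Err(_) => {
                // Lines that are not valid JSON are treated as free-form
                // diagnostics and accumulated into `error` for later reporting.
                error.push_str(line);
                error.push('\n');
                None
            }
        }
    }

    fn from_eof(&self) -> Option<ProgressMessage> {
        // Nothing to synthesize once the stream ends.
        None
    }
}
```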
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 1b15d83..c380621 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -562,7 +562,7 @@ pub enum MaxSubstitutionLength {
         /// `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and
        /// therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to update an
         /// existing workspace. As a reference for implementors, buck2's `rust-project` will likely
-        /// be useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project.
+        /// be useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.
         workspace_discoverConfig: Option<DiscoverWorkspaceConfig> = None,
     }
 }
@@ -3566,23 +3566,13 @@ macro_rules! set {
         },
         "ImportGranularityDef" => set! {
             "type": "string",
-            "anyOf": [
-                {
-                    "enum": ["crate", "module", "item", "one"],
-                    "enumDescriptions": [
-                        "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
-                        "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
-                        "Flatten imports so that each has its own use statement.",
-                        "Merge all imports into a single use statement as long as they have the same visibility and attributes."
-                    ],
-                },
-                {
-                    "enum": ["preserve"],
-                    "enumDescriptions": [
-                        "Deprecated - unless `enforceGranularity` is `true`, the style of the current file is preferred over this setting. Behaves like `item`.",
-                    ],
-                    "deprecated": true,
-                }
+            "enum": ["crate", "module", "item", "one", "preserve"],
+            "enumDescriptions": [
+                "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+                "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+                "Flatten imports so that each has its own use statement.",
+                "Merge all imports into a single use statement as long as they have the same visibility and attributes.",
+                "Deprecated - unless `enforceGranularity` is `true`, the style of the current file is preferred over this setting. Behaves like `item`."
             ],
         },
         "ImportPrefixDef" => set! {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
index 4ec0c07..0e96eff 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
@@ -9,7 +9,7 @@
 use serde::{Deserialize, Serialize};
 use tracing::{info_span, span::EnteredSpan};
 
-use crate::command::{CargoParser, CommandHandle};
+use crate::command::{CommandHandle, JsonLinesParser};
 
 pub(crate) const ARG_PLACEHOLDER: &str = "{arg}";
 
@@ -118,7 +118,7 @@ fn new(data: DiscoverProjectData) -> Self {
 
 struct DiscoverProjectParser;
 
-impl CargoParser<DiscoverProjectMessage> for DiscoverProjectParser {
+impl JsonLinesParser<DiscoverProjectMessage> for DiscoverProjectParser {
     fn from_line(&self, line: &str, _error: &mut String) -> Option<DiscoverProjectMessage> {
         match serde_json::from_str::<DiscoverProjectData>(line) {
             Ok(data) => {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
index 68337dd..14a4a17 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
@@ -25,7 +25,7 @@
 use triomphe::Arc;
 
 use crate::{
-    command::{CargoParser, CommandHandle},
+    command::{CommandHandle, JsonLinesParser},
     diagnostics::DiagnosticsGeneration,
 };
 
@@ -753,7 +753,7 @@ enum CargoCheckMessage {
 
 struct CargoCheckParser;
 
-impl CargoParser<CargoCheckMessage> for CargoCheckParser {
+impl JsonLinesParser<CargoCheckMessage> for CargoCheckParser {
     fn from_line(&self, line: &str, error: &mut String) -> Option<CargoCheckMessage> {
         let mut deserializer = serde_json::Deserializer::from_str(line);
         deserializer.disable_recursion_limit();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index e585c3f..86a35c7 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -119,7 +119,7 @@ pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSe
     }
 }
 
-pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
+pub(crate) fn documentation(documentation: Documentation<'_>) -> lsp_types::Documentation {
     let value = format_docs(&documentation);
     let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
     lsp_types::Documentation::MarkupContent(markup_content)
@@ -1975,7 +1975,7 @@ pub(crate) fn markup_content(
         ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
         ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
     };
-    let value = format_docs(&Documentation::new(markup.into()));
+    let value = format_docs(&Documentation::new_owned(markup.into()));
     lsp_types::MarkupContent { kind, value }
 }
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 7b339fa..8b4748d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -485,8 +485,8 @@ fn handle_event(&mut self, event: Event) {
                 }
                 // delay initial cache priming until proc macros are loaded, or we will load up a bunch of garbage into salsa
                 let proc_macros_loaded = self.config.prefill_caches()
-                    && !self.config.expand_proc_macros()
-                    || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false);
+                    && (!self.config.expand_proc_macros()
+                        || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false));
                 if proc_macros_loaded {
                     self.prime_caches_queue.request_op("became quiescent".to_owned(), ());
                 }
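
The hunk above is a precedence fix: `&&` binds tighter than `||`, so without the added parentheses a successful proc-macro fetch alone could mark the queue as ready even when cache prefilling is disabled. A standalone illustration with made-up values:

```rust
fn main() {
    // Hypothetical state: prefilling disabled, proc-macro expansion enabled,
    // last proc-macro fetch reported success.
    let (prefill_caches, expand_proc_macros, last_fetch_ok) = (false, true, true);

    // Old grouping, `(a && !b) || c`: the `|| c` escapes the prefill guard.
    let old = prefill_caches && !expand_proc_macros || last_fetch_ok;
    // New grouping from the hunk above, `a && (!b || c)`.
    let new = prefill_caches && (!expand_proc_macros || last_fetch_ok);

    assert!(old);  // would have requested cache priming despite prefilling being off
    assert!(!new); // correctly skips cache priming
}
```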
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
index 9a65e70..7111a15 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs
@@ -9,7 +9,7 @@
 use toolchain::Tool;
 
 use crate::{
-    command::{CargoParser, CommandHandle},
+    command::{CommandHandle, JsonLinesParser},
     flycheck::CargoOptions,
 };
 
@@ -57,7 +57,7 @@ pub(crate) fn new(test_target: &TestTarget) -> Self {
     }
 }
 
-impl CargoParser<CargoTestMessage> for CargoTestOutputParser {
+impl JsonLinesParser<CargoTestMessage> for CargoTestOutputParser {
     fn from_line(&self, line: &str, _error: &mut String) -> Option<CargoTestMessage> {
         let mut deserializer = serde_json::Deserializer::from_str(line);
         deserializer.disable_recursion_limit();
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
index 4e525be..1ded2b4 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
@@ -1,6 +1,6 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use std::{fmt, hash::Hash};
+use std::{collections::VecDeque, fmt, hash::Hash};
 
 use intern::Symbol;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -102,26 +102,34 @@ pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
     SpanData<Ctx>: Copy + fmt::Debug,
     SpanMap: SpanMapper<SpanData<Ctx>>,
 {
-    let mut c = Converter::new(node, map, Default::default(), Default::default(), span, mode);
+    let mut c =
+        Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
+            (true, Vec::new())
+        });
     convert_tokens(&mut c)
 }
 
 /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
 /// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
 /// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap, OnEvent>(
     node: &SyntaxNode,
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
     remove: FxHashSet<SyntaxElement>,
     call_site: SpanData<Ctx>,
     mode: DocCommentDesugarMode,
+    on_enter: OnEvent,
 ) -> tt::TopSubtree<SpanData<Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Ctx>>,
     SpanData<Ctx>: Copy + fmt::Debug,
+    OnEvent: FnMut(
+        &mut PreorderWithTokens,
+        &WalkEvent<SyntaxElement>,
+    ) -> (bool, Vec<tt::Leaf<SpanData<Ctx>>>),
 {
-    let mut c = Converter::new(node, map, append, remove, call_site, mode);
+    let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
     convert_tokens(&mut c)
 }
 
@@ -143,7 +151,6 @@ pub fn token_tree_to_syntax_node<Ctx>(
     tt: &tt::TopSubtree<SpanData<Ctx>>,
     entry_point: parser::TopEntryPoint,
     span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
-    top_edition: Edition,
 ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
 where
     Ctx: Copy + fmt::Debug + PartialEq + PartialEq + Eq + Hash,
@@ -151,7 +158,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
     let buffer = tt.view().strip_invisible();
     let parser_input = to_parser_input(buffer, span_to_edition);
     // It matters what edition we parse with even when we escape all identifiers correctly.
-    let parser_output = entry_point.parse(&parser_input, top_edition);
+    let parser_output = entry_point.parse(&parser_input);
     let mut tree_sink = TtTreeSink::new(buffer.cursor());
     for event in parser_output.iter() {
         match event {
@@ -624,9 +631,9 @@ fn call_site(&self) -> S {
     }
 }
 
-struct Converter<SpanMap, S> {
+struct Converter<SpanMap, S, OnEvent> {
     current: Option<SyntaxToken>,
-    current_leaves: Vec<tt::Leaf<S>>,
+    current_leaves: VecDeque<tt::Leaf<S>>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
@@ -636,9 +643,13 @@ struct Converter<SpanMap, S> {
     remove: FxHashSet<SyntaxElement>,
     call_site: S,
     mode: DocCommentDesugarMode,
+    on_event: OnEvent,
 }
 
-impl<SpanMap, S> Converter<SpanMap, S> {
+impl<SpanMap, S, OnEvent> Converter<SpanMap, S, OnEvent>
+where
+    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
+{
     fn new(
         node: &SyntaxNode,
         map: SpanMap,
@@ -646,8 +657,9 @@ fn new(
         remove: FxHashSet<SyntaxElement>,
         call_site: S,
         mode: DocCommentDesugarMode,
+        on_enter: OnEvent,
     ) -> Self {
-        let mut this = Converter {
+        let mut converter = Converter {
             current: None,
             preorder: node.preorder_with_tokens(),
             range: node.text_range(),
@@ -656,16 +668,21 @@ fn new(
             append,
             remove,
             call_site,
-            current_leaves: vec![],
+            current_leaves: VecDeque::new(),
             mode,
+            on_event: on_enter,
         };
-        let first = this.next_token();
-        this.current = first;
-        this
+        converter.current = converter.next_token();
+        converter
     }
 
     fn next_token(&mut self) -> Option<SyntaxToken> {
         while let Some(ev) = self.preorder.next() {
+            let (keep_event, insert_leaves) = (self.on_event)(&mut self.preorder, &ev);
+            self.current_leaves.extend(insert_leaves);
+            if !keep_event {
+                continue;
+            }
             match ev {
                 WalkEvent::Enter(token) => {
                     if self.remove.contains(&token) {
@@ -675,10 +692,9 @@ fn next_token(&mut self) -> Option<SyntaxToken> {
                             }
                             node => {
                                 self.preorder.skip_subtree();
-                                if let Some(mut v) = self.append.remove(&node) {
-                                    v.reverse();
+                                if let Some(v) = self.append.remove(&node) {
                                     self.current_leaves.extend(v);
-                                    return None;
+                                    continue;
                                 }
                             }
                         }
@@ -687,10 +703,9 @@ fn next_token(&mut self) -> Option<SyntaxToken> {
                     }
                 }
                 WalkEvent::Leave(ele) => {
-                    if let Some(mut v) = self.append.remove(&ele) {
-                        v.reverse();
+                    if let Some(v) = self.append.remove(&ele) {
                         self.current_leaves.extend(v);
-                        return None;
+                        continue;
                     }
                 }
             }
@@ -715,8 +730,8 @@ fn token(&self) -> &SyntaxToken {
     }
 }
 
-impl<SpanMap, S> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
-    fn kind(&self, _ctx: &Converter<SpanMap, S>) -> SyntaxKind {
+impl<SpanMap, S, OnEvent> SrcToken<Converter<SpanMap, S, OnEvent>, S> for SynToken<S> {
+    fn kind(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punct { token, offset: i } => {
@@ -728,14 +743,14 @@ fn kind(&self, _ctx: &Converter<SpanMap, S>) -> SyntaxKind {
             }
         }
     }
-    fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
+    fn to_char(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
             SynToken::Leaf(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter<SpanMap, S, OnEvent>) -> SmolStr {
         match self {
             SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
             SynToken::Leaf(_) => {
@@ -752,10 +767,11 @@ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
     }
 }
 
-impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
+impl<S, SpanMap, OnEvent> TokenConverter<S> for Converter<SpanMap, S, OnEvent>
 where
     S: Copy,
     SpanMap: SpanMapper<S>,
+    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf<S>>),
 {
     type Token = SynToken<S>;
     fn convert_doc_comment(
@@ -781,10 +797,7 @@ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
             ));
         }
 
-        if let Some(leaf) = self.current_leaves.pop() {
-            if self.current_leaves.is_empty() {
-                self.current = self.next_token();
-            }
+        if let Some(leaf) = self.current_leaves.pop_front() {
             return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
         }
 
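
For callers of the extended `syntax_node_to_token_tree_modified`, the new `on_enter` callback decides per walk event whether the event is kept and which extra leaves to splice into the output; the identity callback used by `syntax_node_to_token_tree` above is `|_, _| (true, Vec::new())`. A hedged sketch of a call site, where `node`, `span_map` and `call_site` are assumed to exist in the surrounding code:

```rust
// Sketch only: drops comment tokens from the output and never splices in
// extra leaves. Everything except the callback shape is assumed context.
let tree = syntax_node_to_token_tree_modified(
    &node,
    span_map,
    Default::default(), // no appended leaves
    Default::default(), // no removed elements
    call_site,
    DocCommentDesugarMode::ProcMacro,
    |_preorder, event| {
        let keep = !matches!(
            event,
            WalkEvent::Enter(NodeOrToken::Token(tok)) if tok.kind() == SyntaxKind::COMMENT
        );
        (keep, Vec::new())
    },
);
```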
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs
index c0ff8e1..5922994 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs
@@ -16,6 +16,8 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
 
     let mut current = buffer.cursor();
     let mut syntax_context_to_edition_cache = FxHashMap::default();
+    let mut ctx_edition =
+        |ctx| *syntax_context_to_edition_cache.entry(ctx).or_insert_with(|| span_to_edition(ctx));
 
     while !current.eof() {
         let tt = current.token_tree();
@@ -26,8 +28,8 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
         {
             current.bump();
             match current.token_tree() {
-                Some(tt::TokenTree::Leaf(tt::Leaf::Ident(_ident))) => {
-                    res.push(LIFETIME_IDENT);
+                Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => {
+                    res.push(LIFETIME_IDENT, ctx_edition(ident.span.ctx));
                     current.bump();
                     continue;
                 }
@@ -51,7 +53,7 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
                             tt::LitKind::CStr | tt::LitKind::CStrRaw(_) => SyntaxKind::C_STRING,
                             tt::LitKind::Err(_) => SyntaxKind::ERROR,
                         };
-                        res.push(kind);
+                        res.push(kind, ctx_edition(lit.span.ctx));
 
                         if kind == FLOAT_NUMBER && !lit.symbol.as_str().ends_with('.') {
                             // Tag the token as joint if it is float with a fractional part
@@ -61,20 +63,18 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
                         }
                     }
                     tt::Leaf::Ident(ident) => {
-                        let edition = *syntax_context_to_edition_cache
-                            .entry(ident.span.ctx)
-                            .or_insert_with(|| span_to_edition(ident.span.ctx));
+                        let edition = ctx_edition(ident.span.ctx);
                         match ident.sym.as_str() {
-                            "_" => res.push(T![_]),
-                            i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
-                            _ if ident.is_raw.yes() => res.push(IDENT),
+                            "_" => res.push(T![_], edition),
+                            i if i.starts_with('\'') => res.push(LIFETIME_IDENT, edition),
+                            _ if ident.is_raw.yes() => res.push(IDENT, edition),
                             text => match SyntaxKind::from_keyword(text, edition) {
-                                Some(kind) => res.push(kind),
+                                Some(kind) => res.push(kind, edition),
                                 None => {
                                     let contextual_keyword =
                                         SyntaxKind::from_contextual_keyword(text, edition)
                                             .unwrap_or(SyntaxKind::IDENT);
-                                    res.push_ident(contextual_keyword);
+                                    res.push_ident(contextual_keyword, edition);
                                 }
                             },
                         }
@@ -82,7 +82,7 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
                     tt::Leaf::Punct(punct) => {
                         let kind = SyntaxKind::from_char(punct.char)
                             .unwrap_or_else(|| panic!("{punct:#?} is not a valid punct"));
-                        res.push(kind);
+                        res.push(kind, ctx_edition(punct.span.ctx));
                         if punct.spacing == tt::Spacing::Joint {
                             res.was_joint();
                         }
@@ -97,7 +97,7 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
                     tt::DelimiterKind::Bracket => Some(T!['[']),
                     tt::DelimiterKind::Invisible => None,
                 } {
-                    res.push(kind);
+                    res.push(kind, ctx_edition(subtree.delimiter.open.ctx));
                 }
                 current.bump();
             }
@@ -109,7 +109,7 @@ pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
                     tt::DelimiterKind::Bracket => Some(T![']']),
                     tt::DelimiterKind::Invisible => None,
                 } {
-                    res.push(kind);
+                    res.push(kind, ctx_edition(subtree.delimiter.close.ctx));
                 }
             }
         };
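
The `ctx_edition` closure introduced at the top of this hunk is a memoized wrapper around `span_to_edition`, so each syntax context is resolved at most once even though `res.push` now needs an edition for every token. The pattern in isolation, with stand-ins for the key and the expensive lookup:

```rust
use std::collections::HashMap;

fn main() {
    let mut cache: HashMap<u32, u64> = HashMap::new();
    // Stand-in for `span_to_edition`: pretend this lookup is expensive.
    let expensive_lookup = |ctx: u32| -> u64 { u64::from(ctx) * 1_000 };
    // Equivalent of `ctx_edition`: consult the cache first, compute on a miss.
    let mut cached_lookup =
        |ctx: u32| *cache.entry(ctx).or_insert_with(|| expensive_lookup(ctx));

    assert_eq!(cached_lookup(3), 3_000); // computed
    assert_eq!(cached_lookup(3), 3_000); // served from the cache
}
```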
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index aea99a4..5d67fd4 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -26,7 +26,8 @@
     generated::{nodes::*, tokens::*},
     node_ext::{
         AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
-        SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+        SlicePatComponents, StructKind, TokenTreeChildren, TypeBoundKind, TypeOrConstParam,
+        VisibilityKind,
     },
     operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
     token_ext::{
@@ -35,6 +36,7 @@
     traits::{
         AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs,
         HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+        attrs_including_inner,
     },
 };
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index af741d1..901d17b 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -10,7 +10,7 @@
 use rowan::{GreenNodeData, GreenTokenData};
 
 use crate::{
-    NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText,
+    NodeOrToken, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxToken, T, TokenText,
     ast::{
         self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName,
         HasTypeBounds, SyntaxNode, support,
@@ -1114,3 +1114,39 @@ pub fn leading_pipe(&self) -> Option<SyntaxToken> {
             .filter(|it| it.kind() == T![|])
     }
 }
+
+/// An iterator over the elements in an [`ast::TokenTree`].
+///
+/// Does not yield trivia or the delimiters.
+#[derive(Clone)]
+pub struct TokenTreeChildren {
+    iter: SyntaxElementChildren,
+}
+
+impl TokenTreeChildren {
+    #[inline]
+    pub fn new(tt: &ast::TokenTree) -> Self {
+        let mut iter = tt.syntax.children_with_tokens();
+        iter.next(); // Bump the opening delimiter.
+        Self { iter }
+    }
+}
+
+impl Iterator for TokenTreeChildren {
+    type Item = NodeOrToken<ast::TokenTree, SyntaxToken>;
+
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.iter.find_map(|item| match item {
+            NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
+            NodeOrToken::Token(token) => {
+                let kind = token.kind();
+                (!matches!(
+                    kind,
+                    SyntaxKind::WHITESPACE | SyntaxKind::COMMENT | T![')'] | T![']'] | T!['}']
+                ))
+                .then_some(NodeOrToken::Token(token))
+            }
+        })
+    }
+}
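
A brief usage sketch of the new iterator; `attr_tt` names whatever `ast::TokenTree` the caller already has (for example the token tree of a `#[cfg(...)]` attribute) and is not defined here:

```rust
// Iterate the children of an existing token tree, skipping trivia and the
// surrounding delimiters, as the iterator above promises.
for child in TokenTreeChildren::new(&attr_tt) {
    match child {
        NodeOrToken::Node(inner_tt) => println!("nested tree: {}", inner_tt.syntax().text()),
        NodeOrToken::Token(token) => println!("token: {:?} {:?}", token.kind(), token.text()),
    }
}
```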
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index e1a9f3a..83ab87c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -40,8 +40,8 @@ pub fn prefix(&self) -> &'static str {
     }
 
     /// Returns the textual content of a doc comment node as a single string with prefix and suffix
-    /// removed.
-    pub fn doc_comment(&self) -> Option<&str> {
+    /// removed, plus the offset of the returned string from the beginning of the comment.
+    pub fn doc_comment(&self) -> Option<(&str, TextSize)> {
         let kind = self.kind();
         match kind {
             CommentKind { shape, doc: Some(_) } => {
@@ -52,7 +52,7 @@ pub fn doc_comment(&self) -> Option<&str> {
                 } else {
                     text
                 };
-                Some(text)
+                Some((text, TextSize::of(prefix)))
             }
             _ => None,
         }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index 5290f32..2f4109a 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -4,8 +4,9 @@
 use either::Either;
 
 use crate::{
-    SyntaxElement, SyntaxToken, T,
+    SyntaxElement, SyntaxNode, SyntaxToken, T,
     ast::{self, AstChildren, AstNode, AstToken, support},
+    match_ast,
     syntax_node::SyntaxElementChildren,
 };
 
@@ -76,34 +77,44 @@ fn has_atom_attr(&self, atom: &str) -> bool {
         self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
     }
 
-    /// Returns all attributes of this node, including inner attributes that may not be directly under this node
-    /// but under a child.
-    fn attrs_including_inner(self) -> impl Iterator<Item = ast::Attr>
-    where
-        Self: Sized,
-    {
-        let inner_attrs_node = if let Some(it) =
-            support::child::<ast::BlockExpr>(self.syntax()).and_then(|it| it.stmt_list())
-        {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::<ast::MatchArmList>(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::<ast::AssocItemList>(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::<ast::ItemList>(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::<ast::ExternItemList>(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::<ast::MacroItems>(self.syntax()) {
-            Some(it.syntax)
-        } else {
-            None
-        };
-
-        self.attrs().chain(inner_attrs_node.into_iter().flat_map(|it| support::children(&it)))
+    /// This may return the same node it was called on (this is the case for `SourceFile`). The
+    /// caller is responsible for avoiding duplicate attributes.
+    fn inner_attributes_node(&self) -> Option<SyntaxNode> {
+        let syntax = self.syntax();
+        Some(match_ast! {
+            match syntax {
+                // A `SourceFile` contains the inner attributes of itself.
+                ast::SourceFile(_) => syntax.clone(),
+                ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+                ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+                ast::MatchExpr(it) => it.match_arm_list()?.syntax().clone(),
+                ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+                ast::Trait(it) => it.assoc_item_list()?.syntax().clone(),
+                ast::Module(it) => it.item_list()?.syntax().clone(),
+                ast::BlockExpr(it) => {
+                    if !it.may_carry_attributes() {
+                        return None;
+                    }
+                    syntax.clone()
+                },
+                _ => return None,
+            }
+        })
     }
 }
 
+/// Returns all attributes of the given node, including inner attributes that may not be directly
+/// under the node itself but under a child.
+pub fn attrs_including_inner(owner: &dyn HasAttrs) -> impl Iterator<Item = ast::Attr> + Clone {
+    owner.attrs().filter(|attr| attr.kind().is_outer()).chain(
+        owner
+            .inner_attributes_node()
+            .into_iter()
+            .flat_map(|node| support::children::<ast::Attr>(&node))
+            .filter(|attr| attr.kind().is_inner()),
+    )
+}
+
 pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
@@ -118,7 +129,7 @@ pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> DocCommentIter {
     #[cfg(test)]
     pub fn doc_comment_text(self) -> Option<String> {
         let docs = itertools::Itertools::join(
-            &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
+            &mut self.filter_map(|comment| comment.doc_comment().map(|it| it.0.to_owned())),
             "\n",
         );
         if docs.is_empty() { None } else { Some(docs) }
@@ -151,7 +162,7 @@ pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> AttrDocCommentIter {
 impl Iterator for AttrDocCommentIter {
     type Item = Either<ast::Attr, ast::Comment>;
     fn next(&mut self) -> Option<Self::Item> {
-        self.iter.by_ref().find_map(|el| match el {
+        self.iter.find_map(|el| match el {
             SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
             SyntaxElement::Token(tok) => {
                 ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
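
A short, hedged sketch of the free `attrs_including_inner` function that replaces the old trait method; the source string is illustrative and the `Edition::CURRENT` re-export is assumed:

```rust
// Collect both the outer and the inner attribute of a module through the new
// free function instead of the removed trait method.
let parse = SourceFile::parse("#[cfg(test)] mod m { #![allow(dead_code)] }", Edition::CURRENT);
let module = parse.tree().syntax().descendants().find_map(ast::Module::cast).unwrap();
let attrs: Vec<_> = attrs_including_inner(&module)
    .map(|attr| attr.syntax().text().to_string())
    .collect();
assert_eq!(attrs, ["#[cfg(test)]", "#![allow(dead_code)]"]);
```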
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
index 9e286ed..249c81a 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
@@ -13,7 +13,7 @@ pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Ve
     let _p = tracing::info_span!("parse_text").entered();
     let lexed = parser::LexedStr::new(edition, text);
     let parser_input = lexed.to_input(edition);
-    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
+    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }
@@ -26,7 +26,7 @@ pub(crate) fn parse_text_at(
     let _p = tracing::info_span!("parse_text_at").entered();
     let lexed = parser::LexedStr::new(edition, text);
     let parser_input = lexed.to_input(edition);
-    let parser_output = entry.parse(&parser_input, edition);
+    let parser_output = entry.parse(&parser_input);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
index c54f143..5f193f0 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -109,7 +109,7 @@ fn reparse_block(
         return None;
     }
 
-    let tree_traversal = reparser.parse(&parser_input, edition);
+    let tree_traversal = reparser.parse(&parser_input);
 
     let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
index 0b35887..5683d89 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
@@ -653,4 +653,58 @@ fn test_replace_token_in_parent() {
         let expect = expect![["fn it() {\n    \n}"]];
         expect.assert_eq(&edit.new_root.to_string());
     }
+
+    #[test]
+    fn test_more_times_replace_node_to_mutable_token() {
+        let arg_list =
+            make::arg_list([make::expr_literal("1").into(), make::expr_literal("2").into()]);
+
+        let mut editor = SyntaxEditor::new(arg_list.syntax().clone());
+        let target_expr = make::token(parser::SyntaxKind::UNDERSCORE);
+
+        for arg in arg_list.args() {
+            editor.replace(arg.syntax(), &target_expr);
+        }
+
+        let edit = editor.finish();
+
+        let expect = expect![["(_, _)"]];
+        expect.assert_eq(&edit.new_root.to_string());
+    }
+
+    #[test]
+    fn test_more_times_replace_node_to_mutable() {
+        let arg_list =
+            make::arg_list([make::expr_literal("1").into(), make::expr_literal("2").into()]);
+
+        let mut editor = SyntaxEditor::new(arg_list.syntax().clone());
+        let target_expr = make::expr_literal("3").clone_for_update();
+
+        for arg in arg_list.args() {
+            editor.replace(arg.syntax(), target_expr.syntax());
+        }
+
+        let edit = editor.finish();
+
+        let expect = expect![["(3, 3)"]];
+        expect.assert_eq(&edit.new_root.to_string());
+    }
+
+    #[test]
+    fn test_more_times_insert_node_to_mutable() {
+        let arg_list =
+            make::arg_list([make::expr_literal("1").into(), make::expr_literal("2").into()]);
+
+        let mut editor = SyntaxEditor::new(arg_list.syntax().clone());
+        let target_expr = make::ext::expr_unit().clone_for_update();
+
+        for arg in arg_list.args() {
+            editor.insert(Position::before(arg.syntax()), target_expr.syntax());
+        }
+
+        let edit = editor.finish();
+
+        let expect = expect![["(()1, ()2)"]];
+        expect.assert_eq(&edit.new_root.to_string());
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs
index 01c1f0d..e697d97 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edit_algo.rs
@@ -150,6 +150,35 @@ struct DependentChange {
     // Map change targets to the correct syntax nodes
     let tree_mutator = TreeMutator::new(&root);
     let mut changed_elements = vec![];
+    let mut changed_elements_set = rustc_hash::FxHashSet::default();
+    let mut deduplicate_node = |node_or_token: &mut SyntaxElement| {
+        let node;
+        let node = match node_or_token {
+            SyntaxElement::Token(token) => match token.parent() {
+                None => return,
+                Some(parent) => {
+                    node = parent;
+                    &node
+                }
+            },
+            SyntaxElement::Node(node) => node,
+        };
+        if changed_elements_set.contains(node) {
+            let new_node = node.clone_subtree().clone_for_update();
+            match node_or_token {
+                SyntaxElement::Node(node) => *node = new_node,
+                SyntaxElement::Token(token) => {
+                    *token = new_node
+                        .children_with_tokens()
+                        .filter_map(SyntaxElement::into_token)
+                        .find(|it| it.kind() == token.kind() && it.text() == token.text())
+                        .unwrap();
+                }
+            }
+        } else {
+            changed_elements_set.insert(node.clone());
+        }
+    };
 
     for index in independent_changes {
         match &mut changes[index as usize] {
@@ -180,6 +209,18 @@ struct DependentChange {
             }
         }
 
+        match &mut changes[index as usize] {
+            Change::Insert(_, element) | Change::Replace(_, Some(element)) => {
+                deduplicate_node(element);
+            }
+            Change::InsertAll(_, elements)
+            | Change::ReplaceWithMany(_, elements)
+            | Change::ReplaceAll(_, elements) => {
+                elements.iter_mut().for_each(&mut deduplicate_node);
+            }
+            Change::Replace(_, None) => (),
+        }
+
         // Collect changed elements
         match &changes[index as usize] {
             Change::Insert(_, element) => changed_elements.push(element.clone()),
diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
index a718b96..457cd3a 100644
--- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
@@ -5,7 +5,7 @@
 use base_db::{
     Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
     DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase,
-    SourceRoot, Version, VfsPath, salsa,
+    SourceRoot, Version, VfsPath,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -37,10 +37,11 @@ fn with_single_file(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
         assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
-        (db, fixture.files[0])
+        let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
+        (db, file)
     }
 
     #[track_caller]
@@ -48,16 +49,21 @@ fn with_many_files(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, Vec<EditionedFileId>) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
-        (db, fixture.files)
+        let files = fixture
+            .files
+            .into_iter()
+            .map(|file| EditionedFileId::from_span_guess_origin(&db, file))
+            .collect();
+        (db, files)
     }
 
     #[track_caller]
     fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -69,12 +75,8 @@ fn with_files_extra_proc_macros(
         proc_macros: Vec<(String, ProcMacro)>,
     ) -> Self {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse_with_proc_macros(
-            &db,
-            ra_fixture,
-            MiniCore::RAW_SOURCE,
-            proc_macros,
-        );
+        let fixture =
+            ChangeFixture::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, proc_macros);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -99,12 +101,13 @@ fn with_range_or_offset(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId, RangeOrOffset) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
 
         let (file_id, range_or_offset) = fixture
             .file_position
             .expect("Could not find file position in fixture. Did you forget to add an `$0`?");
+        let file_id = EditionedFileId::from_span_guess_origin(&db, file_id);
         (db, file_id, range_or_offset)
     }
 
@@ -116,9 +119,9 @@ fn test_crate(&self) -> Crate {
 impl<DB: ExpandDatabase + SourceDatabase + Default + 'static> WithFixture for DB {}
 
 pub struct ChangeFixture {
-    pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
+    pub file_position: Option<(span::EditionedFileId, RangeOrOffset)>,
     pub file_lines: Vec<usize>,
-    pub files: Vec<EditionedFileId>,
+    pub files: Vec<span::EditionedFileId>,
     pub change: ChangeWithProcMacros,
     pub sysroot_files: Vec<FileId>,
 }
@@ -126,15 +129,11 @@ pub struct ChangeFixture {
 const SOURCE_ROOT_PREFIX: &str = "/";
 
 impl ChangeFixture {
-    pub fn parse(
-        db: &dyn salsa::Database,
-        #[rust_analyzer::rust_fixture] ra_fixture: &str,
-    ) -> ChangeFixture {
-        Self::parse_with_proc_macros(db, ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
+    pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
+        Self::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
     }
 
     pub fn parse_with_proc_macros(
-        db: &dyn salsa::Database,
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
         minicore_raw: &str,
         mut proc_macro_defs: Vec<(String, ProcMacro)>,
@@ -202,7 +201,7 @@ pub fn parse_with_proc_macros(
             let meta = FileMeta::from_fixture(entry, current_source_root_kind);
             if let Some(range_or_offset) = range_or_offset {
                 file_position =
-                    Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
+                    Some((span::EditionedFileId::new(file_id, meta.edition), range_or_offset));
             }
 
             assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -259,7 +258,7 @@ pub fn parse_with_proc_macros(
             source_change.change_file(file_id, Some(text));
             let path = VfsPath::new_virtual_path(meta.path);
             file_set.insert(file_id, path);
-            files.push(EditionedFileId::new(db, file_id, meta.edition));
+            files.push(span::EditionedFileId::new(file_id, meta.edition));
             file_id = FileId::from_raw(file_id.index() + 1);
         }
 
diff --git a/src/tools/rust-analyzer/docs/book/src/configuration.md b/src/tools/rust-analyzer/docs/book/src/configuration.md
index 708eecd..789ceb4 100644
--- a/src/tools/rust-analyzer/docs/book/src/configuration.md
+++ b/src/tools/rust-analyzer/docs/book/src/configuration.md
@@ -26,16 +26,18 @@
 For example, a very common configuration is to enable proc-macro
 support, which can be achieved by sending this JSON:
 
-    {
-      "cargo": {
-        "buildScripts": {
-          "enable": true,
-        },
-      },
-      "procMacro": {
-        "enable": true,
-      }
-    }
+```json
+{
+  "cargo": {
+    "buildScripts": {
+      "enable": true,
+    },
+  },
+  "procMacro": {
+    "enable": true,
+  }
+}
+```
 
 Please consult your editor’s documentation to learn more about how to
 configure [LSP
diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
index f0da2bd..fe1ea57 100644
--- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
+++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md
@@ -1630,7 +1630,7 @@
 `DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and
 therefore, a workspace, whereas `DiscoverArgument::buildfile` is used to update an
 existing workspace. As a reference for implementors, buck2's `rust-project` will likely
-be useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project.
+be useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.
 
 
 ## rust-analyzer.workspace.symbol.search.excludeImports {#workspace.symbol.search.excludeImports}
diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/architecture.md b/src/tools/rust-analyzer/docs/book/src/contributing/architecture.md
index 1cc13b3..f2b6c05 100644
--- a/src/tools/rust-analyzer/docs/book/src/contributing/architecture.md
+++ b/src/tools/rust-analyzer/docs/book/src/contributing/architecture.md
@@ -120,7 +120,7 @@
 **Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP.
 This is important because it is possible to make useful tooling using only the syntax tree.
 Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust.
-See also https://mlfbrown.com/paper.pdf.
+See also <https://mlfbrown.com/paper.pdf>.
 You can view the `syntax` crate as an entry point to rust-analyzer.
 `syntax` crate is an **API Boundary**.
 
diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/debugging.md b/src/tools/rust-analyzer/docs/book/src/contributing/debugging.md
index db3a28e..fcda664 100644
--- a/src/tools/rust-analyzer/docs/book/src/contributing/debugging.md
+++ b/src/tools/rust-analyzer/docs/book/src/contributing/debugging.md
@@ -57,13 +57,13 @@
 
 - Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
 
-If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates\rust-analyzer\src\bin\main.rs`)
+If you need to debug the server from the very beginning, including its initialization code, you can use the `--wait-dbg` command line argument or `RA_WAIT_DBG` environment variable. The server will spin at the beginning of the `try_main` function (see `crates/rust-analyzer/src/bin/main.rs`).
 
 ```rust
-    let mut d = 4;
-    while d == 4 { // set a breakpoint here and change the value
-        d = 4;
-    }
+let mut d = 4;
+while d == 4 { // set a breakpoint here and change the value
+    d = 4;
+}
 ```
 
 However for this to work, you will need to enable debug_assertions in your build
diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/guide.md b/src/tools/rust-analyzer/docs/book/src/contributing/guide.md
index d068403..774eb5b 100644
--- a/src/tools/rust-analyzer/docs/book/src/contributing/guide.md
+++ b/src/tools/rust-analyzer/docs/book/src/contributing/guide.md
@@ -7,7 +7,7 @@
 architectural solutions related to the problem of building an IDE-first compiler
 for Rust. There is a video version of this guide as well -
 however, it's based on an older 2019-01-20 release (git tag [guide-2019-01]):
-https://youtu.be/ANKBNiSWyfc.
+<https://youtu.be/ANKBNiSWyfc>.
 
 [guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
 [2024-01-01]: https://github.com/rust-lang/rust-analyzer/tree/2024-01-01
diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md b/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md
index 91154b6..0e91e12 100644
--- a/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md
@@ -23,7 +23,7 @@
 
 ## Configuration in `initializationOptions`
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/567>
 
 The `initializationOptions` field of the `InitializeParams` of the initialization request should contain the `"rust-analyzer"` section of the configuration.
 
@@ -39,7 +39,7 @@
 
 ## Snippet `TextEdit`
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/724
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/724>
 
 **Experimental Client Capability:** `{ "snippetTextEdit": boolean }`
 
@@ -74,7 +74,7 @@
 
 ## `CodeAction` Groups
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/994
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/994>
 
 **Experimental Client Capability:** `{ "codeActionGroup": boolean }`
 
@@ -121,7 +121,7 @@
 
 ## Parent Module
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/1002
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/1002>
 
 **Experimental Server Capability:** `{ "parentModule": boolean }`
 
@@ -155,7 +155,7 @@
 
 ## Join Lines
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/992
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/992>
 
 **Experimental Server Capability:** `{ "joinLines": boolean }`
 
@@ -202,7 +202,7 @@
 
 ## On Enter
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/1001
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/1001>
 
 **Experimental Server Capability:** `{ "onEnter": boolean }`
 
@@ -297,7 +297,7 @@
 
 ## Matching Brace
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/999
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/999>
 
 **Experimental Server Capability:** `{ "matchingBrace": boolean }`
 
@@ -342,7 +342,7 @@
 
 ## Runnables
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/944
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/944>
 
 **Experimental Server Capability:** `{ "runnables": { "kinds": string[] } }`
 
@@ -854,7 +854,7 @@
 
 ## Open Cargo.toml
 
-**Upstream Issue:** https://github.com/rust-lang/rust-analyzer/issues/6462
+**Upstream Issue:** <https://github.com/rust-lang/rust-analyzer/issues/6462>
 
 **Experimental Server Capability:** `{ "openCargoToml": boolean }`
 
@@ -897,7 +897,7 @@
 
 ## Hover Range
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/377
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/377>
 
 **Experimental Server Capability:** `{ "hoverRange": boolean }`
 
@@ -924,7 +924,7 @@
 
 ## Move Item
 
-**Upstream Issue:** https://github.com/rust-lang/rust-analyzer/issues/6823
+**Upstream Issue:** <https://github.com/rust-lang/rust-analyzer/issues/6823>
 
 This request is sent from client to server to move the item under the cursor or selection in some direction.
 
@@ -949,7 +949,7 @@
 
 ## Workspace Symbols Filtering
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/941
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/941>
 
 **Experimental Server Capability:** `{ "workspaceSymbolScopeKindFiltering": boolean }`
 
@@ -983,7 +983,7 @@
 
 ## Client Commands
 
-**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/642
+**Upstream Issue:** <https://github.com/microsoft/language-server-protocol/issues/642>
 
 **Experimental Client Capability:** `{ "commands?": ClientCommandOptions }`
 
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index 4d1ae48..8475864 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -2020,30 +2020,19 @@
                         "markdownDescription": "How imports should be grouped into use statements.",
                         "default": "crate",
                         "type": "string",
-                        "anyOf": [
-                            {
-                                "enum": [
-                                    "crate",
-                                    "module",
-                                    "item",
-                                    "one"
-                                ],
-                                "enumDescriptions": [
-                                    "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
-                                    "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
-                                    "Flatten imports so that each has its own use statement.",
-                                    "Merge all imports into a single use statement as long as they have the same visibility and attributes."
-                                ]
-                            },
-                            {
-                                "enum": [
-                                    "preserve"
-                                ],
-                                "enumDescriptions": [
-                                    "Deprecated - unless `enforceGranularity` is `true`, the style of the current file is preferred over this setting. Behaves like `item`."
-                                ],
-                                "deprecated": true
-                            }
+                        "enum": [
+                            "crate",
+                            "module",
+                            "item",
+                            "one",
+                            "preserve"
+                        ],
+                        "enumDescriptions": [
+                            "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+                            "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+                            "Flatten imports so that each has its own use statement.",
+                            "Merge all imports into a single use statement as long as they have the same visibility and attributes.",
+                            "Deprecated - unless `enforceGranularity` is `true`, the style of the current file is preferred over this setting. Behaves like `item`."
                         ]
                     }
                 }
@@ -3043,7 +3032,7 @@
                 "title": "Workspace",
                 "properties": {
                     "rust-analyzer.workspace.discoverConfig": {
-                        "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\"\n        ],\n        \"progressLabel\": \"rust-analyzer\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option<String> },\n        Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUILD\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n        \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n        \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. As a reference for implementors, buck2's `rust-project` will likely\nbe useful: https://github.com/facebook/buck2/tree/main/integrations/rust-project.",
+                        "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\"\n        ],\n        \"progressLabel\": \"rust-analyzer\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option<String> },\n        Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUILD\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n        \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n        \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::buildfile` is used to to update an\nexisting workspace. As a reference for implementors, buck2's `rust-project` will likely\nbe useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.",
                         "default": null,
                         "anyOf": [
                             {
diff --git a/src/tools/rust-analyzer/lib/smol_str/src/gdb_smolstr_printer.py b/src/tools/rust-analyzer/lib/smol_str/src/gdb_smolstr_printer.py
index 5f28ddd..2792aae 100644
--- a/src/tools/rust-analyzer/lib/smol_str/src/gdb_smolstr_printer.py
+++ b/src/tools/rust-analyzer/lib/smol_str/src/gdb_smolstr_printer.py
@@ -73,16 +73,14 @@
 
         if variant_name == "Static":
             try:
-                data_ptr = variant_val["data_ptr"]
-                length = int(variant_val["length"])
-                mem = gdb.selected_inferior().read_memory(int(data_ptr), length)
-                return _read_utf8(mem)
+                # variant_val["__0"] is &'static str
+                return variant_val["__0"]
             except Exception as e:
                 return f"<SmolStr Static error: {e}>"
 
         if variant_name == "Heap":
             try:
-                # variant_val is an Arc<str>
+                # variant_val["__0"] is an Arc<str>
                 inner = variant_val["__0"]["ptr"]["pointer"]
                 # inner is a fat pointer to ArcInner<str>
                 data_ptr = inner["data_ptr"]
diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index bddb68a..7a84872 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-1be6b13be73dc12e98e51b403add4c41a0b77759
+dfe1b8c97bcde283102f706d5dcdc3649e5e12e3
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
index bddce0f..a803b4e 100644
--- a/src/tools/rust-analyzer/xtask/src/install.rs
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -44,8 +44,11 @@ pub(crate) struct ServerOpt {
 
 impl ServerOpt {
     fn to_features(&self) -> Vec<&'static str> {
-        let mut features = Vec::new();
-        features.extend(self.malloc.to_features());
+        let malloc_features = self.malloc.to_features();
+        let mut features = Vec::with_capacity(
+            malloc_features.len() + if self.force_always_assert { 2 } else { 0 },
+        );
+        features.extend(malloc_features);
         if self.force_always_assert {
             features.extend(["--features", "force-always-assert"]);
         }
@@ -153,7 +156,7 @@ fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> {
 
     let mut install_cmd = cmd!(
         sh,
-        "cargo install --path crates/rust-analyzer --profile={profile} --locked --force --features force-always-assert {features...}"
+        "cargo install --path crates/rust-analyzer --profile={profile} --locked --force {features...}"
     );
 
     if let Some(train_crate) = opts.pgo {
diff --git a/tests/coverage/conditions.cov-map b/tests/coverage/conditions.cov-map
index c6eba8c..1872f14 100644
--- a/tests/coverage/conditions.cov-map
+++ b/tests/coverage/conditions.cov-map
@@ -1,92 +1,72 @@
 Function name: conditions::main
-Raw bytes (642): 0x[01, 01, 54, 05, 09, 01, 05, 09, 5d, 09, 27, 5d, 61, 27, 65, 5d, 61, 09, 23, 27, 65, 5d, 61, 01, 03, 03, 0d, 11, 51, 11, 4f, 51, 55, 4f, 59, 51, 55, 11, 4b, 4f, 59, 51, 55, 03, 9f, 01, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 9f, 01, 15, 0d, 11, 19, 45, 19, 97, 01, 45, 49, 97, 01, 4d, 45, 49, 19, 93, 01, 97, 01, 4d, 45, 49, 9f, 01, 8f, 02, 0d, 11, 15, 19, 15, 19, 15, 19, 15, 19, 15, 19, 1d, 21, 15, 19, 8f, 02, 1d, 15, 19, 21, 39, 21, e3, 01, 39, 3d, e3, 01, 41, 39, 3d, 21, df, 01, e3, 01, 41, 39, 3d, 8f, 02, cb, 02, 15, 19, 1d, 21, 8f, 02, cb, 02, 15, 19, 1d, 21, 8f, 02, cb, 02, 15, 19, 1d, 21, 8f, 02, cb, 02, 15, 19, 1d, 21, 25, 29, 1d, 21, cb, 02, 25, 1d, 21, 29, 2d, 29, c3, 02, 2d, 31, c3, 02, 35, 2d, 31, 29, bf, 02, c3, 02, 35, 2d, 31, cb, 02, cf, 02, 1d, 21, 25, 29, 52, 01, 03, 01, 00, 0a, 01, 01, 09, 00, 16, 01, 00, 19, 00, 1a, 01, 01, 08, 00, 0c, 01, 00, 0d, 02, 06, 00, 02, 05, 00, 06, 03, 03, 09, 00, 0a, 01, 00, 10, 00, 1d, 05, 01, 09, 00, 17, 05, 01, 09, 00, 0a, 06, 01, 0f, 00, 1c, 09, 01, 0c, 00, 19, 0a, 00, 1d, 00, 2a, 0e, 00, 2e, 00, 3c, 23, 00, 3d, 02, 0a, 1e, 02, 09, 00, 0a, 09, 01, 09, 00, 17, 09, 01, 09, 00, 12, 2a, 02, 09, 00, 0f, 03, 03, 09, 00, 16, 03, 00, 19, 00, 1a, 03, 01, 08, 00, 0c, 03, 00, 0d, 02, 06, 00, 02, 05, 00, 06, 03, 02, 08, 00, 15, 0d, 00, 16, 02, 06, 2e, 02, 0f, 00, 1c, 11, 01, 0c, 00, 19, 32, 00, 1d, 00, 2a, 36, 00, 2e, 00, 3c, 4b, 00, 3d, 02, 0a, 46, 02, 09, 00, 0a, 11, 01, 09, 00, 17, 52, 02, 09, 00, 0f, 9f, 01, 03, 08, 00, 0c, 9f, 01, 01, 0d, 00, 1a, 9f, 01, 00, 1d, 00, 1e, 9f, 01, 01, 0c, 00, 10, 9f, 01, 00, 11, 02, 0a, 00, 02, 09, 00, 0a, 9f, 01, 02, 0c, 00, 19, 15, 00, 1a, 02, 0a, 72, 04, 11, 00, 1e, 19, 01, 10, 00, 1d, 7a, 00, 21, 00, 2e, 7e, 00, 32, 00, 40, 93, 01, 00, 41, 02, 0e, 8e, 01, 02, 0d, 00, 0e, 19, 01, 0d, 00, 1b, 9a, 01, 02, 0d, 00, 13, 00, 02, 05, 00, 06, 8f, 02, 02, 09, 00, 16, 8f, 02, 00, 19, 00, 1a, 8f, 02, 01, 08, 00, 0c, 8f, 02, 00, 0d, 02, 06, 00, 02, 05, 00, 06, cb, 02, 02, 09, 00, 0a, 8f, 02, 00, 10, 00, 1d, 1d, 00, 1e, 02, 06, be, 01, 02, 0f, 00, 1c, 21, 01, 0c, 00, 19, c6, 01, 00, 1d, 00, 2a, ca, 01, 00, 2e, 00, 3c, df, 01, 00, 3d, 02, 0a, da, 01, 02, 09, 00, 0a, 21, 01, 09, 00, 17, 8a, 02, 02, 0d, 00, 20, 8a, 02, 00, 23, 00, 2c, 8a, 02, 01, 09, 00, 11, 8a, 02, 01, 09, 00, 0f, cf, 02, 03, 09, 00, 0a, cb, 02, 00, 10, 00, 1d, 25, 00, 1e, 02, 06, 9e, 02, 02, 0f, 00, 1c, 29, 01, 0c, 00, 19, a6, 02, 00, 1d, 00, 2a, aa, 02, 00, 2e, 00, 3c, bf, 02, 00, 3d, 02, 0a, ba, 02, 02, 09, 00, 0a, 29, 01, 09, 00, 17, c6, 02, 02, 09, 00, 0f, 01, 02, 01, 00, 02]
+Raw bytes (581): 0x[01, 01, 40, 05, 09, 01, 05, 09, 51, 09, 13, 51, 55, 01, 03, 03, 0d, 11, 49, 11, 27, 49, 4d, 03, 63, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 0d, 11, 63, 15, 0d, 11, 19, 41, 19, 5b, 41, 45, 63, bf, 01, 0d, 11, 15, 19, 15, 19, 15, 19, 15, 19, 15, 19, 1d, 21, 15, 19, bf, 01, 1d, 15, 19, 21, 39, 21, 93, 01, 39, 3d, bf, 01, fb, 01, 15, 19, 1d, 21, bf, 01, fb, 01, 15, 19, 1d, 21, bf, 01, fb, 01, 15, 19, 1d, 21, bf, 01, fb, 01, 15, 19, 1d, 21, 25, 29, 1d, 21, fb, 01, 25, 1d, 21, 29, 2d, 29, f3, 01, 2d, 31, f3, 01, 35, 2d, 31, 29, ef, 01, f3, 01, 35, 2d, 31, fb, 01, ff, 01, 1d, 21, 25, 29, 52, 01, 03, 01, 00, 0a, 01, 01, 09, 00, 16, 01, 00, 19, 00, 1a, 01, 01, 08, 00, 0c, 01, 00, 0d, 02, 06, 00, 02, 05, 00, 06, 03, 03, 09, 00, 0a, 01, 00, 10, 00, 1d, 05, 01, 09, 00, 17, 05, 01, 09, 00, 0a, 06, 01, 0f, 00, 1c, 09, 01, 0c, 00, 19, 0a, 00, 1d, 00, 2a, 0e, 00, 2e, 00, 3c, 09, 00, 3d, 02, 0a, 00, 02, 09, 00, 0a, 09, 01, 09, 00, 17, 09, 01, 09, 00, 12, 16, 02, 09, 00, 0f, 03, 03, 09, 00, 16, 03, 00, 19, 00, 1a, 03, 01, 08, 00, 0c, 03, 00, 0d, 02, 06, 00, 02, 05, 00, 06, 03, 02, 08, 00, 15, 0d, 00, 16, 02, 06, 1a, 02, 0f, 00, 1c, 11, 01, 0c, 00, 19, 1e, 00, 1d, 00, 2a, 22, 00, 2e, 00, 3c, 11, 00, 3d, 02, 0a, 00, 02, 09, 00, 0a, 11, 01, 09, 00, 17, 2a, 02, 09, 00, 0f, 63, 03, 08, 00, 0c, 63, 01, 0d, 00, 1a, 63, 00, 1d, 00, 1e, 63, 01, 0c, 00, 10, 63, 00, 11, 02, 0a, 00, 02, 09, 00, 0a, 63, 02, 0c, 00, 19, 15, 00, 1a, 02, 0a, 4a, 04, 11, 00, 1e, 19, 01, 10, 00, 1d, 52, 00, 21, 00, 2e, 56, 00, 32, 00, 40, 19, 00, 41, 02, 0e, 00, 02, 0d, 00, 0e, 19, 01, 0d, 00, 1b, 5e, 02, 0d, 00, 13, 00, 02, 05, 00, 06, bf, 01, 02, 09, 00, 16, bf, 01, 00, 19, 00, 1a, bf, 01, 01, 08, 00, 0c, bf, 01, 00, 0d, 02, 06, 00, 02, 05, 00, 06, fb, 01, 02, 09, 00, 0a, bf, 01, 00, 10, 00, 1d, 1d, 00, 1e, 02, 06, 82, 01, 02, 0f, 00, 1c, 21, 01, 0c, 00, 19, 8a, 01, 00, 1d, 00, 2a, 8e, 01, 00, 2e, 00, 3c, 21, 00, 3d, 02, 0a, 00, 02, 09, 00, 0a, 21, 01, 09, 00, 17, ba, 01, 02, 0d, 00, 20, ba, 01, 00, 23, 00, 2c, ba, 01, 01, 09, 00, 11, ba, 01, 01, 09, 00, 0f, ff, 01, 03, 09, 00, 0a, fb, 01, 00, 10, 00, 1d, 25, 00, 1e, 02, 06, ce, 01, 02, 0f, 00, 1c, 29, 01, 0c, 00, 19, d6, 01, 00, 1d, 00, 2a, da, 01, 00, 2e, 00, 3c, ef, 01, 00, 3d, 02, 0a, ea, 01, 02, 09, 00, 0a, 29, 01, 09, 00, 17, f6, 01, 02, 09, 00, 0f, 01, 02, 01, 00, 02]
 Number of files: 1
 - file 0 => $DIR/conditions.rs
-Number of expressions: 84
+Number of expressions: 64
 - expression 0 operands: lhs = Counter(1), rhs = Counter(2)
 - expression 1 operands: lhs = Counter(0), rhs = Counter(1)
-- expression 2 operands: lhs = Counter(2), rhs = Counter(23)
-- expression 3 operands: lhs = Counter(2), rhs = Expression(9, Add)
-- expression 4 operands: lhs = Counter(23), rhs = Counter(24)
-- expression 5 operands: lhs = Expression(9, Add), rhs = Counter(25)
-- expression 6 operands: lhs = Counter(23), rhs = Counter(24)
-- expression 7 operands: lhs = Counter(2), rhs = Expression(8, Add)
-- expression 8 operands: lhs = Expression(9, Add), rhs = Counter(25)
-- expression 9 operands: lhs = Counter(23), rhs = Counter(24)
-- expression 10 operands: lhs = Counter(0), rhs = Expression(0, Add)
-- expression 11 operands: lhs = Expression(0, Add), rhs = Counter(3)
-- expression 12 operands: lhs = Counter(4), rhs = Counter(20)
-- expression 13 operands: lhs = Counter(4), rhs = Expression(19, Add)
-- expression 14 operands: lhs = Counter(20), rhs = Counter(21)
-- expression 15 operands: lhs = Expression(19, Add), rhs = Counter(22)
-- expression 16 operands: lhs = Counter(20), rhs = Counter(21)
-- expression 17 operands: lhs = Counter(4), rhs = Expression(18, Add)
-- expression 18 operands: lhs = Expression(19, Add), rhs = Counter(22)
-- expression 19 operands: lhs = Counter(20), rhs = Counter(21)
-- expression 20 operands: lhs = Expression(0, Add), rhs = Expression(39, Add)
-- expression 21 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 22 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 23 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 2 operands: lhs = Counter(2), rhs = Counter(20)
+- expression 3 operands: lhs = Counter(2), rhs = Expression(4, Add)
+- expression 4 operands: lhs = Counter(20), rhs = Counter(21)
+- expression 5 operands: lhs = Counter(0), rhs = Expression(0, Add)
+- expression 6 operands: lhs = Expression(0, Add), rhs = Counter(3)
+- expression 7 operands: lhs = Counter(4), rhs = Counter(18)
+- expression 8 operands: lhs = Counter(4), rhs = Expression(9, Add)
+- expression 9 operands: lhs = Counter(18), rhs = Counter(19)
+- expression 10 operands: lhs = Expression(0, Add), rhs = Expression(24, Add)
+- expression 11 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 12 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 13 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 14 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 15 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 16 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 17 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 18 operands: lhs = Expression(24, Add), rhs = Counter(5)
+- expression 19 operands: lhs = Counter(3), rhs = Counter(4)
+- expression 20 operands: lhs = Counter(6), rhs = Counter(16)
+- expression 21 operands: lhs = Counter(6), rhs = Expression(22, Add)
+- expression 22 operands: lhs = Counter(16), rhs = Counter(17)
+- expression 23 operands: lhs = Expression(24, Add), rhs = Expression(47, Add)
 - expression 24 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 25 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 26 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 27 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 28 operands: lhs = Expression(39, Add), rhs = Counter(5)
-- expression 29 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 30 operands: lhs = Counter(6), rhs = Counter(17)
-- expression 31 operands: lhs = Counter(6), rhs = Expression(37, Add)
-- expression 32 operands: lhs = Counter(17), rhs = Counter(18)
-- expression 33 operands: lhs = Expression(37, Add), rhs = Counter(19)
-- expression 34 operands: lhs = Counter(17), rhs = Counter(18)
-- expression 35 operands: lhs = Counter(6), rhs = Expression(36, Add)
-- expression 36 operands: lhs = Expression(37, Add), rhs = Counter(19)
-- expression 37 operands: lhs = Counter(17), rhs = Counter(18)
-- expression 38 operands: lhs = Expression(39, Add), rhs = Expression(67, Add)
-- expression 39 operands: lhs = Counter(3), rhs = Counter(4)
-- expression 40 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 25 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 26 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 27 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 28 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 29 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 30 operands: lhs = Counter(7), rhs = Counter(8)
+- expression 31 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 32 operands: lhs = Expression(47, Add), rhs = Counter(7)
+- expression 33 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 34 operands: lhs = Counter(8), rhs = Counter(14)
+- expression 35 operands: lhs = Counter(8), rhs = Expression(36, Add)
+- expression 36 operands: lhs = Counter(14), rhs = Counter(15)
+- expression 37 operands: lhs = Expression(47, Add), rhs = Expression(62, Add)
+- expression 38 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 39 operands: lhs = Counter(7), rhs = Counter(8)
+- expression 40 operands: lhs = Expression(47, Add), rhs = Expression(62, Add)
 - expression 41 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 42 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 43 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 42 operands: lhs = Counter(7), rhs = Counter(8)
+- expression 43 operands: lhs = Expression(47, Add), rhs = Expression(62, Add)
 - expression 44 operands: lhs = Counter(5), rhs = Counter(6)
 - expression 45 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 46 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 47 operands: lhs = Expression(67, Add), rhs = Counter(7)
-- expression 48 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 49 operands: lhs = Counter(8), rhs = Counter(14)
-- expression 50 operands: lhs = Counter(8), rhs = Expression(56, Add)
-- expression 51 operands: lhs = Counter(14), rhs = Counter(15)
-- expression 52 operands: lhs = Expression(56, Add), rhs = Counter(16)
-- expression 53 operands: lhs = Counter(14), rhs = Counter(15)
-- expression 54 operands: lhs = Counter(8), rhs = Expression(55, Add)
-- expression 55 operands: lhs = Expression(56, Add), rhs = Counter(16)
-- expression 56 operands: lhs = Counter(14), rhs = Counter(15)
-- expression 57 operands: lhs = Expression(67, Add), rhs = Expression(82, Add)
-- expression 58 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 59 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 60 operands: lhs = Expression(67, Add), rhs = Expression(82, Add)
-- expression 61 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 46 operands: lhs = Expression(47, Add), rhs = Expression(62, Add)
+- expression 47 operands: lhs = Counter(5), rhs = Counter(6)
+- expression 48 operands: lhs = Counter(7), rhs = Counter(8)
+- expression 49 operands: lhs = Counter(9), rhs = Counter(10)
+- expression 50 operands: lhs = Counter(7), rhs = Counter(8)
+- expression 51 operands: lhs = Expression(62, Add), rhs = Counter(9)
+- expression 52 operands: lhs = Counter(7), rhs = Counter(8)
+- expression 53 operands: lhs = Counter(10), rhs = Counter(11)
+- expression 54 operands: lhs = Counter(10), rhs = Expression(60, Add)
+- expression 55 operands: lhs = Counter(11), rhs = Counter(12)
+- expression 56 operands: lhs = Expression(60, Add), rhs = Counter(13)
+- expression 57 operands: lhs = Counter(11), rhs = Counter(12)
+- expression 58 operands: lhs = Counter(10), rhs = Expression(59, Add)
+- expression 59 operands: lhs = Expression(60, Add), rhs = Counter(13)
+- expression 60 operands: lhs = Counter(11), rhs = Counter(12)
+- expression 61 operands: lhs = Expression(62, Add), rhs = Expression(63, Add)
 - expression 62 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 63 operands: lhs = Expression(67, Add), rhs = Expression(82, Add)
-- expression 64 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 65 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 66 operands: lhs = Expression(67, Add), rhs = Expression(82, Add)
-- expression 67 operands: lhs = Counter(5), rhs = Counter(6)
-- expression 68 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 69 operands: lhs = Counter(9), rhs = Counter(10)
-- expression 70 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 71 operands: lhs = Expression(82, Add), rhs = Counter(9)
-- expression 72 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 73 operands: lhs = Counter(10), rhs = Counter(11)
-- expression 74 operands: lhs = Counter(10), rhs = Expression(80, Add)
-- expression 75 operands: lhs = Counter(11), rhs = Counter(12)
-- expression 76 operands: lhs = Expression(80, Add), rhs = Counter(13)
-- expression 77 operands: lhs = Counter(11), rhs = Counter(12)
-- expression 78 operands: lhs = Counter(10), rhs = Expression(79, Add)
-- expression 79 operands: lhs = Expression(80, Add), rhs = Counter(13)
-- expression 80 operands: lhs = Counter(11), rhs = Counter(12)
-- expression 81 operands: lhs = Expression(82, Add), rhs = Expression(83, Add)
-- expression 82 operands: lhs = Counter(7), rhs = Counter(8)
-- expression 83 operands: lhs = Counter(9), rhs = Counter(10)
+- expression 63 operands: lhs = Counter(9), rhs = Counter(10)
 Number of file 0 mappings: 82
 - Code(Counter(0)) at (prev + 3, 1) to (start + 0, 10)
 - Code(Counter(0)) at (prev + 1, 9) to (start + 0, 22)
@@ -103,16 +83,14 @@
     = (c0 - c1)
 - Code(Counter(2)) at (prev + 1, 12) to (start + 0, 25)
 - Code(Expression(2, Sub)) at (prev + 0, 29) to (start + 0, 42)
-    = (c2 - c23)
+    = (c2 - c20)
 - Code(Expression(3, Sub)) at (prev + 0, 46) to (start + 0, 60)
-    = (c2 - (c23 + c24))
-- Code(Expression(8, Add)) at (prev + 0, 61) to (start + 2, 10)
-    = ((c23 + c24) + c25)
-- Code(Expression(7, Sub)) at (prev + 2, 9) to (start + 0, 10)
-    = (c2 - ((c23 + c24) + c25))
+    = (c2 - (c20 + c21))
+- Code(Counter(2)) at (prev + 0, 61) to (start + 2, 10)
+- Code(Zero) at (prev + 2, 9) to (start + 0, 10)
 - Code(Counter(2)) at (prev + 1, 9) to (start + 0, 23)
 - Code(Counter(2)) at (prev + 1, 9) to (start + 0, 18)
-- Code(Expression(10, Sub)) at (prev + 2, 9) to (start + 0, 15)
+- Code(Expression(5, Sub)) at (prev + 2, 9) to (start + 0, 15)
     = (c0 - (c1 + c2))
 - Code(Expression(0, Add)) at (prev + 3, 9) to (start + 0, 22)
     = (c1 + c2)
@@ -126,101 +104,95 @@
 - Code(Expression(0, Add)) at (prev + 2, 8) to (start + 0, 21)
     = (c1 + c2)
 - Code(Counter(3)) at (prev + 0, 22) to (start + 2, 6)
-- Code(Expression(11, Sub)) at (prev + 2, 15) to (start + 0, 28)
+- Code(Expression(6, Sub)) at (prev + 2, 15) to (start + 0, 28)
     = ((c1 + c2) - c3)
 - Code(Counter(4)) at (prev + 1, 12) to (start + 0, 25)
-- Code(Expression(12, Sub)) at (prev + 0, 29) to (start + 0, 42)
-    = (c4 - c20)
-- Code(Expression(13, Sub)) at (prev + 0, 46) to (start + 0, 60)
-    = (c4 - (c20 + c21))
-- Code(Expression(18, Add)) at (prev + 0, 61) to (start + 2, 10)
-    = ((c20 + c21) + c22)
-- Code(Expression(17, Sub)) at (prev + 2, 9) to (start + 0, 10)
-    = (c4 - ((c20 + c21) + c22))
+- Code(Expression(7, Sub)) at (prev + 0, 29) to (start + 0, 42)
+    = (c4 - c18)
+- Code(Expression(8, Sub)) at (prev + 0, 46) to (start + 0, 60)
+    = (c4 - (c18 + c19))
+- Code(Counter(4)) at (prev + 0, 61) to (start + 2, 10)
+- Code(Zero) at (prev + 2, 9) to (start + 0, 10)
 - Code(Counter(4)) at (prev + 1, 9) to (start + 0, 23)
-- Code(Expression(20, Sub)) at (prev + 2, 9) to (start + 0, 15)
+- Code(Expression(10, Sub)) at (prev + 2, 9) to (start + 0, 15)
     = ((c1 + c2) - (c3 + c4))
-- Code(Expression(39, Add)) at (prev + 3, 8) to (start + 0, 12)
+- Code(Expression(24, Add)) at (prev + 3, 8) to (start + 0, 12)
     = (c3 + c4)
-- Code(Expression(39, Add)) at (prev + 1, 13) to (start + 0, 26)
+- Code(Expression(24, Add)) at (prev + 1, 13) to (start + 0, 26)
     = (c3 + c4)
-- Code(Expression(39, Add)) at (prev + 0, 29) to (start + 0, 30)
+- Code(Expression(24, Add)) at (prev + 0, 29) to (start + 0, 30)
     = (c3 + c4)
-- Code(Expression(39, Add)) at (prev + 1, 12) to (start + 0, 16)
+- Code(Expression(24, Add)) at (prev + 1, 12) to (start + 0, 16)
     = (c3 + c4)
-- Code(Expression(39, Add)) at (prev + 0, 17) to (start + 2, 10)
+- Code(Expression(24, Add)) at (prev + 0, 17) to (start + 2, 10)
     = (c3 + c4)
 - Code(Zero) at (prev + 2, 9) to (start + 0, 10)
-- Code(Expression(39, Add)) at (prev + 2, 12) to (start + 0, 25)
+- Code(Expression(24, Add)) at (prev + 2, 12) to (start + 0, 25)
     = (c3 + c4)
 - Code(Counter(5)) at (prev + 0, 26) to (start + 2, 10)
-- Code(Expression(28, Sub)) at (prev + 4, 17) to (start + 0, 30)
+- Code(Expression(18, Sub)) at (prev + 4, 17) to (start + 0, 30)
     = ((c3 + c4) - c5)
 - Code(Counter(6)) at (prev + 1, 16) to (start + 0, 29)
-- Code(Expression(30, Sub)) at (prev + 0, 33) to (start + 0, 46)
-    = (c6 - c17)
-- Code(Expression(31, Sub)) at (prev + 0, 50) to (start + 0, 64)
-    = (c6 - (c17 + c18))
-- Code(Expression(36, Add)) at (prev + 0, 65) to (start + 2, 14)
-    = ((c17 + c18) + c19)
-- Code(Expression(35, Sub)) at (prev + 2, 13) to (start + 0, 14)
-    = (c6 - ((c17 + c18) + c19))
+- Code(Expression(20, Sub)) at (prev + 0, 33) to (start + 0, 46)
+    = (c6 - c16)
+- Code(Expression(21, Sub)) at (prev + 0, 50) to (start + 0, 64)
+    = (c6 - (c16 + c17))
+- Code(Counter(6)) at (prev + 0, 65) to (start + 2, 14)
+- Code(Zero) at (prev + 2, 13) to (start + 0, 14)
 - Code(Counter(6)) at (prev + 1, 13) to (start + 0, 27)
-- Code(Expression(38, Sub)) at (prev + 2, 13) to (start + 0, 19)
+- Code(Expression(23, Sub)) at (prev + 2, 13) to (start + 0, 19)
     = ((c3 + c4) - (c5 + c6))
 - Code(Zero) at (prev + 2, 5) to (start + 0, 6)
-- Code(Expression(67, Add)) at (prev + 2, 9) to (start + 0, 22)
+- Code(Expression(47, Add)) at (prev + 2, 9) to (start + 0, 22)
     = (c5 + c6)
-- Code(Expression(67, Add)) at (prev + 0, 25) to (start + 0, 26)
+- Code(Expression(47, Add)) at (prev + 0, 25) to (start + 0, 26)
     = (c5 + c6)
-- Code(Expression(67, Add)) at (prev + 1, 8) to (start + 0, 12)
+- Code(Expression(47, Add)) at (prev + 1, 8) to (start + 0, 12)
     = (c5 + c6)
-- Code(Expression(67, Add)) at (prev + 0, 13) to (start + 2, 6)
+- Code(Expression(47, Add)) at (prev + 0, 13) to (start + 2, 6)
     = (c5 + c6)
 - Code(Zero) at (prev + 2, 5) to (start + 0, 6)
-- Code(Expression(82, Add)) at (prev + 2, 9) to (start + 0, 10)
+- Code(Expression(62, Add)) at (prev + 2, 9) to (start + 0, 10)
     = (c7 + c8)
-- Code(Expression(67, Add)) at (prev + 0, 16) to (start + 0, 29)
+- Code(Expression(47, Add)) at (prev + 0, 16) to (start + 0, 29)
     = (c5 + c6)
 - Code(Counter(7)) at (prev + 0, 30) to (start + 2, 6)
-- Code(Expression(47, Sub)) at (prev + 2, 15) to (start + 0, 28)
+- Code(Expression(32, Sub)) at (prev + 2, 15) to (start + 0, 28)
     = ((c5 + c6) - c7)
 - Code(Counter(8)) at (prev + 1, 12) to (start + 0, 25)
-- Code(Expression(49, Sub)) at (prev + 0, 29) to (start + 0, 42)
+- Code(Expression(34, Sub)) at (prev + 0, 29) to (start + 0, 42)
     = (c8 - c14)
-- Code(Expression(50, Sub)) at (prev + 0, 46) to (start + 0, 60)
+- Code(Expression(35, Sub)) at (prev + 0, 46) to (start + 0, 60)
     = (c8 - (c14 + c15))
-- Code(Expression(55, Add)) at (prev + 0, 61) to (start + 2, 10)
-    = ((c14 + c15) + c16)
-- Code(Expression(54, Sub)) at (prev + 2, 9) to (start + 0, 10)
-    = (c8 - ((c14 + c15) + c16))
+- Code(Counter(8)) at (prev + 0, 61) to (start + 2, 10)
+- Code(Zero) at (prev + 2, 9) to (start + 0, 10)
 - Code(Counter(8)) at (prev + 1, 9) to (start + 0, 23)
-- Code(Expression(66, Sub)) at (prev + 2, 13) to (start + 0, 32)
+- Code(Expression(46, Sub)) at (prev + 2, 13) to (start + 0, 32)
     = ((c5 + c6) - (c7 + c8))
-- Code(Expression(66, Sub)) at (prev + 0, 35) to (start + 0, 44)
+- Code(Expression(46, Sub)) at (prev + 0, 35) to (start + 0, 44)
     = ((c5 + c6) - (c7 + c8))
-- Code(Expression(66, Sub)) at (prev + 1, 9) to (start + 0, 17)
+- Code(Expression(46, Sub)) at (prev + 1, 9) to (start + 0, 17)
     = ((c5 + c6) - (c7 + c8))
-- Code(Expression(66, Sub)) at (prev + 1, 9) to (start + 0, 15)
+- Code(Expression(46, Sub)) at (prev + 1, 9) to (start + 0, 15)
     = ((c5 + c6) - (c7 + c8))
-- Code(Expression(83, Add)) at (prev + 3, 9) to (start + 0, 10)
+- Code(Expression(63, Add)) at (prev + 3, 9) to (start + 0, 10)
     = (c9 + c10)
-- Code(Expression(82, Add)) at (prev + 0, 16) to (start + 0, 29)
+- Code(Expression(62, Add)) at (prev + 0, 16) to (start + 0, 29)
     = (c7 + c8)
 - Code(Counter(9)) at (prev + 0, 30) to (start + 2, 6)
-- Code(Expression(71, Sub)) at (prev + 2, 15) to (start + 0, 28)
+- Code(Expression(51, Sub)) at (prev + 2, 15) to (start + 0, 28)
     = ((c7 + c8) - c9)
 - Code(Counter(10)) at (prev + 1, 12) to (start + 0, 25)
-- Code(Expression(73, Sub)) at (prev + 0, 29) to (start + 0, 42)
+- Code(Expression(53, Sub)) at (prev + 0, 29) to (start + 0, 42)
     = (c10 - c11)
-- Code(Expression(74, Sub)) at (prev + 0, 46) to (start + 0, 60)
+- Code(Expression(54, Sub)) at (prev + 0, 46) to (start + 0, 60)
     = (c10 - (c11 + c12))
-- Code(Expression(79, Add)) at (prev + 0, 61) to (start + 2, 10)
+- Code(Expression(59, Add)) at (prev + 0, 61) to (start + 2, 10)
     = ((c11 + c12) + c13)
-- Code(Expression(78, Sub)) at (prev + 2, 9) to (start + 0, 10)
+- Code(Expression(58, Sub)) at (prev + 2, 9) to (start + 0, 10)
     = (c10 - ((c11 + c12) + c13))
 - Code(Counter(10)) at (prev + 1, 9) to (start + 0, 23)
-- Code(Expression(81, Sub)) at (prev + 2, 9) to (start + 0, 15)
+- Code(Expression(61, Sub)) at (prev + 2, 9) to (start + 0, 15)
     = ((c7 + c8) - (c9 + c10))
 - Code(Counter(0)) at (prev + 2, 1) to (start + 0, 2)
 Highest counter ID seen: c10
diff --git a/tests/crashes/130104.rs b/tests/crashes/130104.rs
deleted file mode 100644
index b961108..0000000
--- a/tests/crashes/130104.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-//@ known-bug: rust-lang/rust#130104
-
-fn main() {
-    let non_secure_function =
-        core::mem::transmute::<fn() -> _, extern "cmse-nonsecure-call" fn() -> _>;
-}
diff --git a/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-abort.diff
index a7551c3..89d04c5 100644
--- a/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-abort.diff
+++ b/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-abort.diff
@@ -1,51 +1,50 @@
 - // MIR for `aggregate` before JumpThreading
 + // MIR for `aggregate` after JumpThreading
   
-  fn aggregate(_1: u8) -> u8 {
-      debug x => _1;
+  fn aggregate() -> u8 {
       let mut _0: u8;
+      let _1: u8;
       let _2: u8;
-      let _3: u8;
-      let mut _4: (u8, u8);
-      let mut _5: bool;
-      let mut _6: u8;
+      let mut _3: (u8, u8);
+      let mut _4: bool;
+      let mut _5: u8;
       scope 1 {
-          debug a => _2;
-          debug b => _3;
+          debug a => _1;
+          debug b => _2;
       }
   
       bb0: {
-          StorageLive(_4);
-          _4 = const aggregate::FOO;
-          StorageLive(_2);
-          _2 = copy (_4.0: u8);
           StorageLive(_3);
-          _3 = copy (_4.1: u8);
-          StorageDead(_4);
+          _3 = const aggregate::FOO;
+          StorageLive(_1);
+          _1 = copy (_3.0: u8);
+          StorageLive(_2);
+          _2 = copy (_3.1: u8);
+          StorageDead(_3);
+          StorageLive(_4);
           StorageLive(_5);
-          StorageLive(_6);
-          _6 = copy _2;
-          _5 = Eq(move _6, const 7_u8);
--         switchInt(move _5) -> [0: bb2, otherwise: bb1];
+          _5 = copy _1;
+          _4 = Eq(move _5, const 7_u8);
+-         switchInt(move _4) -> [0: bb2, otherwise: bb1];
 +         goto -> bb2;
       }
   
       bb1: {
-          StorageDead(_6);
-          _0 = copy _3;
-          goto -> bb3;
-      }
-  
-      bb2: {
-          StorageDead(_6);
+          StorageDead(_5);
           _0 = copy _2;
           goto -> bb3;
       }
   
-      bb3: {
+      bb2: {
           StorageDead(_5);
-          StorageDead(_3);
+          _0 = copy _1;
+          goto -> bb3;
+      }
+  
+      bb3: {
+          StorageDead(_4);
           StorageDead(_2);
+          StorageDead(_1);
           return;
       }
   }
diff --git a/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-unwind.diff
index a7551c3..89d04c5 100644
--- a/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-unwind.diff
+++ b/tests/mir-opt/jump_threading.aggregate.JumpThreading.panic-unwind.diff
@@ -1,51 +1,50 @@
 - // MIR for `aggregate` before JumpThreading
 + // MIR for `aggregate` after JumpThreading
   
-  fn aggregate(_1: u8) -> u8 {
-      debug x => _1;
+  fn aggregate() -> u8 {
       let mut _0: u8;
+      let _1: u8;
       let _2: u8;
-      let _3: u8;
-      let mut _4: (u8, u8);
-      let mut _5: bool;
-      let mut _6: u8;
+      let mut _3: (u8, u8);
+      let mut _4: bool;
+      let mut _5: u8;
       scope 1 {
-          debug a => _2;
-          debug b => _3;
+          debug a => _1;
+          debug b => _2;
       }
   
       bb0: {
-          StorageLive(_4);
-          _4 = const aggregate::FOO;
-          StorageLive(_2);
-          _2 = copy (_4.0: u8);
           StorageLive(_3);
-          _3 = copy (_4.1: u8);
-          StorageDead(_4);
+          _3 = const aggregate::FOO;
+          StorageLive(_1);
+          _1 = copy (_3.0: u8);
+          StorageLive(_2);
+          _2 = copy (_3.1: u8);
+          StorageDead(_3);
+          StorageLive(_4);
           StorageLive(_5);
-          StorageLive(_6);
-          _6 = copy _2;
-          _5 = Eq(move _6, const 7_u8);
--         switchInt(move _5) -> [0: bb2, otherwise: bb1];
+          _5 = copy _1;
+          _4 = Eq(move _5, const 7_u8);
+-         switchInt(move _4) -> [0: bb2, otherwise: bb1];
 +         goto -> bb2;
       }
   
       bb1: {
-          StorageDead(_6);
-          _0 = copy _3;
-          goto -> bb3;
-      }
-  
-      bb2: {
-          StorageDead(_6);
+          StorageDead(_5);
           _0 = copy _2;
           goto -> bb3;
       }
   
-      bb3: {
+      bb2: {
           StorageDead(_5);
-          StorageDead(_3);
+          _0 = copy _1;
+          goto -> bb3;
+      }
+  
+      bb3: {
+          StorageDead(_4);
           StorageDead(_2);
+          StorageDead(_1);
           return;
       }
   }
diff --git a/tests/mir-opt/jump_threading.chained_conditions.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.chained_conditions.JumpThreading.panic-abort.diff
new file mode 100644
index 0000000..f09a187
--- /dev/null
+++ b/tests/mir-opt/jump_threading.chained_conditions.JumpThreading.panic-abort.diff
@@ -0,0 +1,238 @@
+- // MIR for `chained_conditions` before JumpThreading
++ // MIR for `chained_conditions` after JumpThreading
+  
+  fn chained_conditions() -> u8 {
+      let mut _0: u8;
+      let _1: chained_conditions::BacktraceStyle;
+      let mut _2: std::option::Option<std::string::String>;
+      let mut _3: &std::option::Option<std::string::String>;
+      let mut _4: isize;
+      let _5: std::string::String;
+      let _6: &std::string::String;
+      let mut _7: bool;
+      let mut _8: &&std::string::String;
+      let _9: &std::string::String;
+      let mut _10: &&str;
+      let _11: &str;
+      let _12: std::string::String;
+      let _13: &std::string::String;
+      let mut _14: bool;
+      let mut _15: &&std::string::String;
+      let _16: &std::string::String;
+      let mut _17: &&str;
+      let _18: &str;
+      let mut _19: isize;
+      let mut _20: &&str;
+      let mut _21: &&str;
+      let mut _22: bool;
+      let mut _23: bool;
+      let mut _24: isize;
+      let mut _25: isize;
+      let mut _26: isize;
+      scope 1 {
+          debug format => _1;
+      }
+      scope 2 {
+          debug x => _5;
+          debug x => _6;
+      }
+      scope 3 {
+          debug x => _12;
+          debug x => _13;
+      }
+      scope 4 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
+          let mut _27: &std::string::String;
+          let mut _28: &str;
+      }
+      scope 5 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
+          let mut _29: &std::string::String;
+          let mut _30: &str;
+      }
+  
+      bb0: {
+          _22 = const false;
+          _23 = const false;
+          StorageLive(_1);
+          StorageLive(_2);
+          _2 = env_var() -> [return: bb1, unwind unreachable];
+      }
+  
+      bb1: {
+          _22 = const true;
+          _23 = const true;
+          _4 = discriminant(_2);
+          switchInt(move _4) -> [0: bb3, 1: bb4, otherwise: bb2];
+      }
+  
+      bb2: {
+          unreachable;
+      }
+  
+      bb3: {
+          _1 = chained_conditions::BacktraceStyle::Off;
+-         goto -> bb18;
++         goto -> bb23;
+      }
+  
+      bb4: {
+          StorageLive(_6);
+          _6 = &((_2 as Some).0: std::string::String);
+          StorageLive(_7);
+          StorageLive(_8);
+          StorageLive(_9);
+          _9 = &(*_6);
+          _8 = &_9;
+          StorageLive(_10);
+          _21 = const chained_conditions::promoted[1];
+          _10 = &(*_21);
+          StorageLive(_27);
+          StorageLive(_28);
+          _27 = copy (*_8);
+          _28 = copy (*_10);
+          _7 = <String as PartialEq<str>>::eq(move _27, move _28) -> [return: bb19, unwind unreachable];
+      }
+  
+      bb5: {
+          StorageDead(_10);
+          StorageDead(_9);
+          StorageDead(_8);
+          StorageDead(_7);
+          StorageLive(_5);
+          _23 = const false;
+          _5 = move ((_2 as Some).0: std::string::String);
+          _1 = chained_conditions::BacktraceStyle::Full;
+          drop(_5) -> [return: bb7, unwind unreachable];
+      }
+  
+      bb6: {
+          StorageDead(_10);
+          StorageDead(_9);
+          StorageDead(_8);
+          StorageDead(_7);
+          StorageDead(_6);
+          StorageLive(_13);
+          _13 = &((_2 as Some).0: std::string::String);
+          StorageLive(_14);
+          StorageLive(_15);
+          StorageLive(_16);
+          _16 = &(*_13);
+          _15 = &_16;
+          StorageLive(_17);
+          _20 = const chained_conditions::promoted[0];
+          _17 = &(*_20);
+          StorageLive(_29);
+          StorageLive(_30);
+          _29 = copy (*_15);
+          _30 = copy (*_17);
+          _14 = <String as PartialEq<str>>::eq(move _29, move _30) -> [return: bb20, unwind unreachable];
+      }
+  
+      bb7: {
+          StorageDead(_5);
+          StorageDead(_6);
+-         goto -> bb18;
++         goto -> bb21;
+      }
+  
+      bb8: {
+          StorageDead(_17);
+          StorageDead(_16);
+          StorageDead(_15);
+          StorageDead(_14);
+          StorageLive(_12);
+          _23 = const false;
+          _12 = move ((_2 as Some).0: std::string::String);
+          _1 = chained_conditions::BacktraceStyle::Off;
+          drop(_12) -> [return: bb10, unwind unreachable];
+      }
+  
+      bb9: {
+          StorageDead(_17);
+          StorageDead(_16);
+          StorageDead(_15);
+          StorageDead(_14);
+          StorageDead(_13);
+          _1 = chained_conditions::BacktraceStyle::Short;
+-         goto -> bb18;
++         goto -> bb23;
+      }
+  
+      bb10: {
+          StorageDead(_12);
+          StorageDead(_13);
+-         goto -> bb18;
++         goto -> bb21;
+      }
+  
+      bb11: {
+          _0 = const 3_u8;
+          goto -> bb14;
+      }
+  
+      bb12: {
+          _0 = const 2_u8;
+          goto -> bb14;
+      }
+  
+      bb13: {
+          _0 = const 1_u8;
+          goto -> bb14;
+      }
+  
+      bb14: {
+          StorageDead(_1);
+          return;
+      }
+  
+      bb15: {
+          _22 = const false;
+          _23 = const false;
+          StorageDead(_2);
+          _19 = discriminant(_1);
+          switchInt(move _19) -> [0: bb13, 1: bb12, 2: bb11, otherwise: bb2];
+      }
+  
+      bb16: {
+          switchInt(copy _23) -> [0: bb15, otherwise: bb17];
+      }
+  
+      bb17: {
+          drop(((_2 as Some).0: std::string::String)) -> [return: bb15, unwind unreachable];
+      }
+  
+      bb18: {
+          _24 = discriminant(_2);
+          switchInt(move _24) -> [1: bb16, otherwise: bb15];
+      }
+  
+      bb19: {
+          StorageDead(_28);
+          StorageDead(_27);
+          switchInt(move _7) -> [0: bb6, otherwise: bb5];
+      }
+  
+      bb20: {
+          StorageDead(_30);
+          StorageDead(_29);
+          switchInt(move _14) -> [0: bb9, otherwise: bb8];
++     }
++ 
++     bb21: {
++         _24 = discriminant(_2);
++         switchInt(move _24) -> [1: bb22, otherwise: bb15];
++     }
++ 
++     bb22: {
++         goto -> bb15;
++     }
++ 
++     bb23: {
++         _24 = discriminant(_2);
++         switchInt(move _24) -> [1: bb24, otherwise: bb15];
++     }
++ 
++     bb24: {
++         goto -> bb17;
+      }
+  }
+  
diff --git a/tests/mir-opt/jump_threading.chained_conditions.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.chained_conditions.JumpThreading.panic-unwind.diff
new file mode 100644
index 0000000..afd40c1
--- /dev/null
+++ b/tests/mir-opt/jump_threading.chained_conditions.JumpThreading.panic-unwind.diff
@@ -0,0 +1,255 @@
+- // MIR for `chained_conditions` before JumpThreading
++ // MIR for `chained_conditions` after JumpThreading
+  
+  fn chained_conditions() -> u8 {
+      let mut _0: u8;
+      let _1: chained_conditions::BacktraceStyle;
+      let mut _2: std::option::Option<std::string::String>;
+      let mut _3: &std::option::Option<std::string::String>;
+      let mut _4: isize;
+      let _5: std::string::String;
+      let _6: &std::string::String;
+      let mut _7: bool;
+      let mut _8: &&std::string::String;
+      let _9: &std::string::String;
+      let mut _10: &&str;
+      let _11: &str;
+      let _12: std::string::String;
+      let _13: &std::string::String;
+      let mut _14: bool;
+      let mut _15: &&std::string::String;
+      let _16: &std::string::String;
+      let mut _17: &&str;
+      let _18: &str;
+      let mut _19: isize;
+      let mut _20: &&str;
+      let mut _21: &&str;
+      let mut _22: bool;
+      let mut _23: bool;
+      let mut _24: isize;
+      let mut _25: isize;
+      let mut _26: isize;
+      scope 1 {
+          debug format => _1;
+      }
+      scope 2 {
+          debug x => _5;
+          debug x => _6;
+      }
+      scope 3 {
+          debug x => _12;
+          debug x => _13;
+      }
+      scope 4 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
+          let mut _27: &std::string::String;
+          let mut _28: &str;
+      }
+      scope 5 (inlined std::cmp::impls::<impl PartialEq<&str> for &String>::eq) {
+          let mut _29: &std::string::String;
+          let mut _30: &str;
+      }
+  
+      bb0: {
+          _22 = const false;
+          _23 = const false;
+          StorageLive(_1);
+          StorageLive(_2);
+          _22 = const true;
+          _23 = const true;
+          _2 = env_var() -> [return: bb1, unwind continue];
+      }
+  
+      bb1: {
+          _4 = discriminant(_2);
+          switchInt(move _4) -> [0: bb3, 1: bb4, otherwise: bb2];
+      }
+  
+      bb2: {
+          unreachable;
+      }
+  
+      bb3: {
+          _1 = chained_conditions::BacktraceStyle::Off;
+-         goto -> bb19;
++         goto -> bb27;
+      }
+  
+      bb4: {
+          StorageLive(_6);
+          _6 = &((_2 as Some).0: std::string::String);
+          StorageLive(_7);
+          StorageLive(_8);
+          StorageLive(_9);
+          _9 = &(*_6);
+          _8 = &_9;
+          StorageLive(_10);
+          _21 = const chained_conditions::promoted[1];
+          _10 = &(*_21);
+          StorageLive(_27);
+          StorageLive(_28);
+          _27 = copy (*_8);
+          _28 = copy (*_10);
+          _7 = <String as PartialEq<str>>::eq(move _27, move _28) -> [return: bb23, unwind: bb22];
+      }
+  
+      bb5: {
+          StorageDead(_10);
+          StorageDead(_9);
+          StorageDead(_8);
+          StorageDead(_7);
+          StorageLive(_5);
+          _23 = const false;
+          _5 = move ((_2 as Some).0: std::string::String);
+          _1 = chained_conditions::BacktraceStyle::Full;
+          drop(_5) -> [return: bb7, unwind: bb22];
+      }
+  
+      bb6: {
+          StorageDead(_10);
+          StorageDead(_9);
+          StorageDead(_8);
+          StorageDead(_7);
+          StorageDead(_6);
+          StorageLive(_13);
+          _13 = &((_2 as Some).0: std::string::String);
+          StorageLive(_14);
+          StorageLive(_15);
+          StorageLive(_16);
+          _16 = &(*_13);
+          _15 = &_16;
+          StorageLive(_17);
+          _20 = const chained_conditions::promoted[0];
+          _17 = &(*_20);
+          StorageLive(_29);
+          StorageLive(_30);
+          _29 = copy (*_15);
+          _30 = copy (*_17);
+          _14 = <String as PartialEq<str>>::eq(move _29, move _30) -> [return: bb24, unwind: bb22];
+      }
+  
+      bb7: {
+          StorageDead(_5);
+          StorageDead(_6);
+-         goto -> bb19;
++         goto -> bb25;
+      }
+  
+      bb8: {
+          StorageDead(_17);
+          StorageDead(_16);
+          StorageDead(_15);
+          StorageDead(_14);
+          StorageLive(_12);
+          _23 = const false;
+          _12 = move ((_2 as Some).0: std::string::String);
+          _1 = chained_conditions::BacktraceStyle::Off;
+          drop(_12) -> [return: bb10, unwind: bb22];
+      }
+  
+      bb9: {
+          StorageDead(_17);
+          StorageDead(_16);
+          StorageDead(_15);
+          StorageDead(_14);
+          StorageDead(_13);
+          _1 = chained_conditions::BacktraceStyle::Short;
+-         goto -> bb19;
++         goto -> bb27;
+      }
+  
+      bb10: {
+          StorageDead(_12);
+          StorageDead(_13);
+-         goto -> bb19;
++         goto -> bb25;
+      }
+  
+      bb11: {
+          _0 = const 3_u8;
+          goto -> bb14;
+      }
+  
+      bb12: {
+          _0 = const 2_u8;
+          goto -> bb14;
+      }
+  
+      bb13: {
+          _0 = const 1_u8;
+          goto -> bb14;
+      }
+  
+      bb14: {
+          StorageDead(_1);
+          return;
+      }
+  
+      bb15 (cleanup): {
+                        "markdownDescription": "Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`].\n\n[`DiscoverWorkspaceConfig`] also requires setting `progress_label` and `files_to_watch`.\n`progress_label` is used for the title in progress indicators, whereas `files_to_watch`\nis used to determine which build system-specific files should be watched in order to\nreload rust-analyzer.\n\nBelow is an example of a valid configuration:\n```json\n\"rust-analyzer.workspace.discoverConfig\": {\n        \"command\": [\n                \"rust-project\",\n                \"develop-json\"\n        ],\n        \"progressLabel\": \"rust-analyzer\",\n        \"filesToWatch\": [\n                \"BUCK\"\n        ]\n}\n```\n\n## On `DiscoverWorkspaceConfig::command`\n\n**Warning**: This format is provisional and subject to change.\n\n[`DiscoverWorkspaceConfig::command`] *must* return a JSON object corresponding to\n`DiscoverProjectData::Finished`:\n\n```norun\n#[derive(Debug, Clone, Deserialize, Serialize)]\n#[serde(tag = \"kind\")]\n#[serde(rename_all = \"snake_case\")]\nenum DiscoverProjectData {\n        Finished { buildfile: Utf8PathBuf, project: ProjectJsonData },\n        Error { error: String, source: Option<String> },\n        Progress { message: String },\n}\n```\n\nAs JSON, `DiscoverProjectData::Finished` is:\n\n```json\n{\n        // the internally-tagged representation of the enum.\n        \"kind\": \"finished\",\n        // the file used by a non-Cargo build system to define\n        // a package or target.\n        \"buildfile\": \"rust-analyzer/BUILD\",\n        // the contents of a rust-project.json, elided for brevity\n        \"project\": {\n                \"sysroot\": \"foo\",\n                \"crates\": []\n        }\n}\n```\n\nIt is encouraged, but not required, to use the other variants on `DiscoverProjectData`\nto provide a more polished end-user experience.\n\n`DiscoverWorkspaceConfig::command` may *optionally* include an `{arg}`, which will be\nsubstituted with the JSON-serialized form of the following enum:\n\n```norun\n#[derive(PartialEq, Clone, Debug, Serialize)]\n#[serde(rename_all = \"camelCase\")]\npub enum DiscoverArgument {\n     Path(AbsPathBuf),\n     Buildfile(AbsPathBuf),\n}\n```\n\nThe JSON representation of `DiscoverArgument::Path` is:\n\n```json\n{\n        \"path\": \"src/main.rs\"\n}\n```\n\nSimilarly, the JSON representation of `DiscoverArgument::Buildfile` is:\n\n```json\n{\n        \"buildfile\": \"BUILD\"\n}\n```\n\n`DiscoverArgument::Path` is used to find and generate a `rust-project.json`, and\ntherefore, a workspace, whereas `DiscoverArgument::Buildfile` is used to update an\nexisting workspace. As a reference for implementors, buck2's `rust-project` will likely\nbe useful: <https://github.com/facebook/buck2/tree/main/integrations/rust-project>.",
+      }
+  
+      bb16: {
+          _22 = const false;
+          _23 = const false;
+          StorageDead(_2);
+          _19 = discriminant(_1);
+          switchInt(move _19) -> [0: bb13, 1: bb12, 2: bb11, otherwise: bb2];
+      }
+  
+      bb17: {
+          switchInt(copy _23) -> [0: bb16, otherwise: bb18];
+      }
+  
+      bb18: {
+          drop(((_2 as Some).0: std::string::String)) -> [return: bb16, unwind: bb15];
+      }
+  
+      bb19: {
+          _24 = discriminant(_2);
+          switchInt(move _24) -> [1: bb17, otherwise: bb16];
+      }
+  
+      bb20 (cleanup): {
+          switchInt(copy _23) -> [0: bb15, otherwise: bb21];
+      }
+  
+      bb21 (cleanup): {
+          drop(((_2 as Some).0: std::string::String)) -> [return: bb15, unwind terminate(cleanup)];
+      }
+  
+      bb22 (cleanup): {
+          _26 = discriminant(_2);
+          switchInt(move _26) -> [1: bb20, otherwise: bb15];
+      }
+  
+      bb23: {
+          StorageDead(_28);
+          StorageDead(_27);
+          switchInt(move _7) -> [0: bb6, otherwise: bb5];
+      }
+  
+      bb24: {
+          StorageDead(_30);
+          StorageDead(_29);
+          switchInt(move _14) -> [0: bb9, otherwise: bb8];
++     }
++ 
++     bb25: {
++         _24 = discriminant(_2);
++         switchInt(move _24) -> [1: bb26, otherwise: bb16];
++     }
++ 
++     bb26: {
++         goto -> bb16;
++     }
++ 
++     bb27: {
++         _24 = discriminant(_2);
++         switchInt(move _24) -> [1: bb28, otherwise: bb16];
++     }
++ 
++     bb28: {
++         goto -> bb18;
+      }
+  }
+  
diff --git a/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-abort.diff
index a863717..0b11a0d 100644
--- a/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-abort.diff
+++ b/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-abort.diff
@@ -23,14 +23,14 @@
   
       bb2: {
           _2 = CustomDiscr::B;
-          goto -> bb3;
+-         goto -> bb3;
++         goto -> bb8;
       }
   
       bb3: {
           StorageDead(_3);
           _4 = discriminant(_2);
--         switchInt(move _4) -> [35: bb5, otherwise: bb4];
-+         goto -> bb4;
+          switchInt(move _4) -> [35: bb5, otherwise: bb4];
       }
   
       bb4: {
@@ -52,6 +52,12 @@
 +         StorageDead(_3);
 +         _4 = discriminant(_2);
 +         goto -> bb5;
++     }
++ 
++     bb8: {
++         StorageDead(_3);
++         _4 = discriminant(_2);
++         goto -> bb4;
       }
   }
   
diff --git a/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-unwind.diff
index a863717..0b11a0d 100644
--- a/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-unwind.diff
+++ b/tests/mir-opt/jump_threading.custom_discr.JumpThreading.panic-unwind.diff
@@ -23,14 +23,14 @@
   
       bb2: {
           _2 = CustomDiscr::B;
-          goto -> bb3;
+-         goto -> bb3;
++         goto -> bb8;
       }
   
       bb3: {
           StorageDead(_3);
           _4 = discriminant(_2);
--         switchInt(move _4) -> [35: bb5, otherwise: bb4];
-+         goto -> bb4;
+          switchInt(move _4) -> [35: bb5, otherwise: bb4];
       }
   
       bb4: {
@@ -52,6 +52,12 @@
 +         StorageDead(_3);
 +         _4 = discriminant(_2);
 +         goto -> bb5;
++     }
++ 
++     bb8: {
++         StorageDead(_3);
++         _4 = discriminant(_2);
++         goto -> bb4;
       }
   }
   
diff --git a/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-abort.diff
index d17f275..4955d05 100644
--- a/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-abort.diff
+++ b/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-abort.diff
@@ -15,7 +15,7 @@
       bb1: {
           _3 = const false;
 -         goto -> bb4;
-+         goto -> bb9;
++         goto -> bb10;
       }
   
       bb2: {
@@ -24,7 +24,8 @@
   
       bb3: {
           _2 = const false;
-          goto -> bb4;
+-         goto -> bb4;
++         goto -> bb13;
       }
   
       bb4: {
@@ -40,8 +41,7 @@
       }
   
       bb7: {
--         goto -> bb5;
-+         goto -> bb10;
+          goto -> bb5;
       }
   
       bb8: {
@@ -49,10 +49,30 @@
 +     }
 + 
 +     bb9: {
-+         goto -> bb5;
++         switchInt(copy _3) -> [0: bb5, otherwise: bb7];
 +     }
 + 
 +     bb10: {
++         goto -> bb11;
++     }
++ 
++     bb11: {
++         goto -> bb8;
++     }
++ 
++     bb12: {
++         switchInt(copy _3) -> [0: bb5, otherwise: bb7];
++     }
++ 
++     bb13: {
++         goto -> bb14;
++     }
++ 
++     bb14: {
++         goto -> bb15;
++     }
++ 
++     bb15: {
           goto -> bb6;
       }
   }
diff --git a/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-unwind.diff
index d17f275..4955d05 100644
--- a/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-unwind.diff
+++ b/tests/mir-opt/jump_threading.disappearing_bb.JumpThreading.panic-unwind.diff
@@ -15,7 +15,7 @@
       bb1: {
           _3 = const false;
 -         goto -> bb4;
-+         goto -> bb9;
++         goto -> bb10;
       }
   
       bb2: {
@@ -24,7 +24,8 @@
   
       bb3: {
           _2 = const false;
-          goto -> bb4;
+-         goto -> bb4;
++         goto -> bb13;
       }
   
       bb4: {
@@ -40,8 +41,7 @@
       }
   
       bb7: {
--         goto -> bb5;
-+         goto -> bb10;
+          goto -> bb5;
       }
   
       bb8: {
@@ -49,10 +49,30 @@
 +     }
 + 
 +     bb9: {
-+         goto -> bb5;
++         switchInt(copy _3) -> [0: bb5, otherwise: bb7];
 +     }
 + 
 +     bb10: {
++         goto -> bb11;
++     }
++ 
++     bb11: {
++         goto -> bb8;
++     }
++ 
++     bb12: {
++         switchInt(copy _3) -> [0: bb5, otherwise: bb7];
++     }
++ 
++     bb13: {
++         goto -> bb14;
++     }
++ 
++     bb14: {
++         goto -> bb15;
++     }
++ 
++     bb15: {
           goto -> bb6;
       }
   }
diff --git a/tests/mir-opt/jump_threading.identity.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.identity.JumpThreading.panic-abort.diff
index 79599f8..97b8d48 100644
--- a/tests/mir-opt/jump_threading.identity.JumpThreading.panic-abort.diff
+++ b/tests/mir-opt/jump_threading.identity.JumpThreading.panic-abort.diff
@@ -97,8 +97,7 @@
           StorageDead(_10);
           StorageDead(_4);
           _5 = discriminant(_3);
--         switchInt(move _5) -> [0: bb2, 1: bb3, otherwise: bb1];
-+         goto -> bb2;
+          switchInt(move _5) -> [0: bb2, 1: bb3, otherwise: bb1];
       }
   
       bb6: {
@@ -114,7 +113,8 @@
       bb7: {
           _11 = move ((_4 as Ok).0: i32);
           _3 = ControlFlow::<Result<Infallible, i32>, i32>::Continue(copy _11);
-          goto -> bb5;
+-         goto -> bb5;
++         goto -> bb9;
 +     }
 + 
 +     bb8: {
@@ -124,6 +124,15 @@
 +         StorageDead(_4);
 +         _5 = discriminant(_3);
 +         goto -> bb3;
++     }
++ 
++     bb9: {
++         StorageDead(_12);
++         StorageDead(_11);
++         StorageDead(_10);
++         StorageDead(_4);
++         _5 = discriminant(_3);
++         goto -> bb2;
       }
   }
   
diff --git a/tests/mir-opt/jump_threading.identity.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.identity.JumpThreading.panic-unwind.diff
index 79599f8..97b8d48 100644
--- a/tests/mir-opt/jump_threading.identity.JumpThreading.panic-unwind.diff
+++ b/tests/mir-opt/jump_threading.identity.JumpThreading.panic-unwind.diff
@@ -97,8 +97,7 @@
           StorageDead(_10);
           StorageDead(_4);
           _5 = discriminant(_3);
--         switchInt(move _5) -> [0: bb2, 1: bb3, otherwise: bb1];
-+         goto -> bb2;
+          switchInt(move _5) -> [0: bb2, 1: bb3, otherwise: bb1];
       }
   
       bb6: {
@@ -114,7 +113,8 @@
       bb7: {
           _11 = move ((_4 as Ok).0: i32);
           _3 = ControlFlow::<Result<Infallible, i32>, i32>::Continue(copy _11);
-          goto -> bb5;
+-         goto -> bb5;
++         goto -> bb9;
 +     }
 + 
 +     bb8: {
@@ -124,6 +124,15 @@
 +         StorageDead(_4);
 +         _5 = discriminant(_3);
 +         goto -> bb3;
++     }
++ 
++     bb9: {
++         StorageDead(_12);
++         StorageDead(_11);
++         StorageDead(_10);
++         StorageDead(_4);
++         _5 = discriminant(_3);
++         goto -> bb2;
       }
   }
   
diff --git a/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-abort.diff
index 9a8bdc8..c8edd70 100644
--- a/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-abort.diff
+++ b/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-abort.diff
@@ -14,7 +14,7 @@
       bb1: {
           _2 = const false;
 -         goto -> bb3;
-+         goto -> bb8;
++         goto -> bb9;
       }
   
       bb2: {
@@ -47,10 +47,14 @@
 +     }
 + 
 +     bb8: {
-+         goto -> bb9;
++         switchInt(copy _2) -> [0: bb4, otherwise: bb5];
 +     }
 + 
 +     bb9: {
++         goto -> bb10;
++     }
++ 
++     bb10: {
 +         goto -> bb6;
       }
   }
diff --git a/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-unwind.diff
index 9a8bdc8..c8edd70 100644
--- a/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-unwind.diff
+++ b/tests/mir-opt/jump_threading.renumbered_bb.JumpThreading.panic-unwind.diff
@@ -14,7 +14,7 @@
       bb1: {
           _2 = const false;
 -         goto -> bb3;
-+         goto -> bb8;
++         goto -> bb9;
       }
   
       bb2: {
@@ -47,10 +47,14 @@
 +     }
 + 
 +     bb8: {
-+         goto -> bb9;
++         switchInt(copy _2) -> [0: bb4, otherwise: bb5];
 +     }
 + 
 +     bb9: {
++         goto -> bb10;
++     }
++ 
++     bb10: {
 +         goto -> bb6;
       }
   }
diff --git a/tests/mir-opt/jump_threading.rs b/tests/mir-opt/jump_threading.rs
index 009e106..39a2f16 100644
--- a/tests/mir-opt/jump_threading.rs
+++ b/tests/mir-opt/jump_threading.rs
@@ -19,9 +19,9 @@ fn too_complex(x: Result<i32, usize>) -> Option<i32> {
     // CHECK:     goto -> bb8;
     // CHECK: bb3: {
     // CHECK:     [[controlflow]] = ControlFlow::<usize, i32>::Continue(
-    // CHECK:     goto -> bb4;
+    // CHECK:     goto -> bb9;
     // CHECK: bb4: {
-    // CHECK:     goto -> bb6;
+    // CHECK:     switchInt(move _8) -> [0: bb6, 1: bb5, otherwise: bb1];
     // CHECK: bb5: {
     // CHECK:     {{_.*}} = copy (([[controlflow]] as Break).0: usize);
     // CHECK:     _0 = Option::<i32>::None;
@@ -34,6 +34,8 @@ fn too_complex(x: Result<i32, usize>) -> Option<i32> {
     // CHECK:     return;
     // CHECK: bb8: {
     // CHECK:     goto -> bb5;
+    // CHECK: bb9: {
+    // CHECK:     goto -> bb6;
     match {
         match x {
             Ok(v) => ControlFlow::Continue(v),
@@ -63,7 +65,7 @@ fn identity(x: Result<i32, i32>) -> Result<i32, i32> {
     // CHECK: bb4: {
     // CHECK:     return;
     // CHECK: bb5: {
-    // CHECK:     goto -> bb2;
+    // CHECK:     switchInt(move _5) -> [0: bb2, 1: bb3, otherwise: bb1];
     // CHECK: bb6: {
     // CHECK:     {{_.*}} = move (([[x]] as Err).0: i32);
     // CHECK:     [[controlflow]] = ControlFlow::<Result<Infallible, i32>, i32>::Break(
@@ -71,12 +73,40 @@ fn identity(x: Result<i32, i32>) -> Result<i32, i32> {
     // CHECK: bb7: {
     // CHECK:     {{_.*}} = move (([[x]] as Ok).0: i32);
     // CHECK:     [[controlflow]] = ControlFlow::<Result<Infallible, i32>, i32>::Continue(
-    // CHECK:     goto -> bb5;
+    // CHECK:     goto -> bb9;
     // CHECK: bb8: {
     // CHECK:     goto -> bb3;
+    // CHECK: bb9: {
+    // CHECK:     goto -> bb2;
     Ok(x?)
 }
 
+fn two_reads() -> i32 {
+    // CHECK-LABEL: fn two_reads(
+    // CHECK: debug a => [[a:_.*]];
+    // CHECK: debug b => [[b:_.*]];
+    // CHECK: debug c => [[c:_.*]];
+    // CHECK: bb0: {
+    // CHECK:     [[a]] = const 2_i32;
+    // CHECK:     [[b]] = copy [[a]];
+    // CHECK:     [[c]] = copy [[a]];
+    // CHECK:     [[tmp:_.*]] = copy [[c]];
+    // CHECK:     [[eq:_.*]] = Eq(move [[tmp]], const 2_i32);
+    // CHECK:     goto -> bb1;
+    // CHECK: bb1: {
+    // CHECK:     _0 = const 0_i32;
+    // CHECK:     goto -> bb3;
+    // CHECK: bb2: {
+    // CHECK:     _0 = const 1_i32;
+    // CHECK:     goto -> bb3;
+    // CHECK: bb3: {
+    // CHECK:     return;
+    let a = 2;
+    let b = a;
+    let c = a;
+    if c == 2 { 0 } else { 1 }
+}
+
 enum DFA {
     A,
     B,
@@ -134,9 +164,9 @@ fn custom_discr(x: bool) -> u8 {
     // CHECK:     goto -> bb7;
     // CHECK: bb2: {
     // CHECK:     {{_.*}} = CustomDiscr::B;
-    // CHECK:     goto -> bb3;
+    // CHECK:     goto -> bb8;
     // CHECK: bb3: {
-    // CHECK:     goto -> bb4;
+    // CHECK:     switchInt(move _4) -> [35: bb5, otherwise: bb4];
     // CHECK: bb4: {
     // CHECK:     _0 = const 13_u8;
     // CHECK:     goto -> bb6;
@@ -147,6 +177,8 @@ fn custom_discr(x: bool) -> u8 {
     // CHECK:     return;
     // CHECK: bb7: {
     // CHECK:     goto -> bb5;
+    // CHECK: bb8: {
+    // CHECK:     goto -> bb4;
     match if x { CustomDiscr::A } else { CustomDiscr::B } {
         CustomDiscr::A => 5,
         _ => 13,
@@ -258,7 +290,6 @@ fn duplicate_chain(x: bool) -> u8 {
         bb4 = {
             // CHECK: bb4: {
             // CHECK:     {{_.*}} = const 15_i32;
-            // CHECK-NOT: switchInt(
             // CHECK:     goto -> bb5;
             let c = 15;
             match a { 5 => bb5, _ => bb6 }
@@ -348,7 +379,7 @@ fn renumbered_bb(x: bool) -> u8 {
         }
         bb1 = {
             // CHECK: bb1: {
-            // CHECK:     goto -> bb8;
+            // CHECK:     goto -> bb9;
             a = false;
             Goto(bb3)
         }
@@ -389,10 +420,13 @@ fn renumbered_bb(x: bool) -> u8 {
         }
         // Duplicate of bb3.
         // CHECK: bb8: {
-        // CHECK-NEXT: goto -> bb9;
-        // Duplicate of bb4.
+        // CHECK:     switchInt(copy _2) -> [0: bb4, otherwise: bb5];
+        // Duplicate of bb8.
         // CHECK: bb9: {
-        // CHECK-NEXT: goto -> bb6;
+        // CHECK:     goto -> bb10;
+        // Duplicate of bb4.
+        // CHECK: bb10: {
+        // CHECK:     goto -> bb6;
     }
 }
 
@@ -407,22 +441,26 @@ fn disappearing_bb(x: u8) -> u8 {
         let a: bool;
         let b: bool;
         {
+            // CHECK: bb0: {
             a = true;
             b = true;
+            // CHECK:     switchInt({{.*}}) -> [0: bb3, 1: bb3, 2: bb1, otherwise: bb2];
             match x { 0 => bb3, 1 => bb3, 2 => bb1, _ => bb2 }
         }
         bb1 = {
             // CHECK: bb1: {
-            // CHECK: goto -> bb9;
+            // CHECK:     goto -> bb10;
             b = false;
             Goto(bb4)
         }
         bb2 = {
+            // CHECK: bb2: {
+            // CHECK:     unreachable;
             Unreachable()
         }
         bb3 = {
             // CHECK: bb3: {
-            // CHECK: goto -> bb10;
+            // CHECK:     goto -> bb13;
             a = false;
             Goto(bb4)
         }
@@ -442,16 +480,34 @@ fn disappearing_bb(x: u8) -> u8 {
             Goto(bb6)
         }
         // CHECK: bb9: {
-        // CHECK: goto -> bb5;
+        // CHECK:     switchInt(copy _3) -> [0: bb5, otherwise: bb7];
         // CHECK: bb10: {
-        // CHECK: goto -> bb6;
+        // CHECK:     goto -> bb11;
+        // CHECK: bb11: {
+        // CHECK:     goto -> bb8;
+        // CHECK: bb12: {
+        // CHECK:     switchInt(copy _3) -> [0: bb5, otherwise: bb7];
+        // CHECK: bb13: {
+        // CHECK:     goto -> bb14;
+        // CHECK: bb14: {
+        // CHECK:     goto -> bb15;
+        // CHECK: bb15: {
+        // CHECK:     goto -> bb6;
     }
 }
 
 /// Verify that we can thread jumps when we assign from an aggregate constant.
-fn aggregate(x: u8) -> u8 {
+fn aggregate() -> u8 {
     // CHECK-LABEL: fn aggregate(
+    // CHECK: debug a => [[a:_.*]];
+    // CHECK: debug b => [[b:_.*]];
     // CHECK-NOT: switchInt(
+    // CHECK: [[a2:_.*]] = copy [[a]];
+    // CHECK: {{_.*}} = Eq(move [[a2]], const 7_u8);
+    // CHECK-NEXT: goto -> [[bb:bb.*]];
+    // CHECK: [[bb]]: {
+    // CHECK-NOT: }
+    // CHECK: _0 = copy [[a]];
 
     const FOO: (u8, u8) = (5, 13);
 
@@ -508,7 +564,16 @@ fn assume(a: u8, b: bool) -> u8 {
 /// Verify that jump threading succeeds seeing through copies of aggregates.
 fn aggregate_copy() -> u32 {
     // CHECK-LABEL: fn aggregate_copy(
+    // CHECK: debug a => [[a:_.*]];
+    // CHECK: debug b => [[b:_.*]];
+    // CHECK: debug c => [[c:_.*]];
     // CHECK-NOT: switchInt(
+    // CHECK: [[c2:_.*]] = copy [[c]];
+    // CHECK: {{_.*}} = Eq(move [[c2]], const 2_u32);
+    // CHECK-NEXT: goto -> [[bb:bb.*]];
+    // CHECK: [[bb]]: {
+    // CHECK-NOT: }
+    // CHECK: _0 = const 13_u32;
 
     const Foo: (u32, u32) = (5, 3);
 
@@ -532,6 +597,14 @@ fn floats() -> u32 {
 
 pub fn bitwise_not() -> i32 {
     // CHECK-LABEL: fn bitwise_not(
+    // CHECK: debug a => [[a:_.*]];
+    // CHECK: [[a2:_.*]] = copy [[a]];
+    // CHECK: [[not:_.*]] = Not(move [[a2]]);
+    // CHECK: {{_.*}} = Eq(move [[not]], const 0_i32);
+    // CHECK-NEXT: goto -> [[bb:bb.*]];
+    // CHECK: [[bb]]: {
+    // CHECK-NOT: }
+    // CHECK: _0 = const 0_i32;
 
     // Test for #131195, which was optimizing `!a == b` into `a != b`.
     let a = 1;
@@ -540,11 +613,49 @@ pub fn bitwise_not() -> i32 {
 
 pub fn logical_not() -> i32 {
     // CHECK-LABEL: fn logical_not(
+    // CHECK: debug a => [[a:_.*]];
+    // CHECK: [[a2:_.*]] = copy [[a]];
+    // CHECK: [[not:_.*]] = Not(move [[a2]]);
+    // CHECK: {{_.*}} = Eq(move [[not]], const true);
+    // CHECK-NEXT: goto -> [[bb:bb.*]];
+    // CHECK: [[bb]]: {
+    // CHECK-NOT: }
+    // CHECK: _0 = const 1_i32;
 
     let a = false;
     if !a == true { 1 } else { 0 }
 }
 
+/// Verify that we correctly handle threading multiple conditions on the same bb.
+/// An earlier implementation was buggy: it mutated a bb that would later be duplicated.
+fn chained_conditions() -> u8 {
+    // CHECK-LABEL: fn chained_conditions(
+
+    #[inline(never)]
+    fn env_var() -> Option<String> {
+        None
+    }
+
+    enum BacktraceStyle {
+        Off,
+        Short,
+        Full,
+    };
+
+    let format = match env_var() {
+        Some(x) if &x == "full" => BacktraceStyle::Full,
+        Some(x) if &x == "0" => BacktraceStyle::Off,
+        Some(_) => BacktraceStyle::Short,
+        None => BacktraceStyle::Off,
+    };
+
+    match format {
+        BacktraceStyle::Off => 1,
+        BacktraceStyle::Short => 2,
+        BacktraceStyle::Full => 3,
+    }
+}
+
 fn main() {
     // CHECK-LABEL: fn main(
     too_complex(Ok(0));
@@ -557,7 +668,7 @@ fn main() {
     mutable_ref();
     renumbered_bb(true);
     disappearing_bb(7);
-    aggregate(7);
+    aggregate();
     assume(7, false);
     floats();
     bitwise_not();
@@ -566,6 +677,7 @@ fn main() {
 
 // EMIT_MIR jump_threading.too_complex.JumpThreading.diff
 // EMIT_MIR jump_threading.identity.JumpThreading.diff
+// EMIT_MIR jump_threading.two_reads.JumpThreading.diff
 // EMIT_MIR jump_threading.custom_discr.JumpThreading.diff
 // EMIT_MIR jump_threading.dfa.JumpThreading.diff
 // EMIT_MIR jump_threading.multiple_match.JumpThreading.diff
@@ -580,3 +692,4 @@ fn main() {
 // EMIT_MIR jump_threading.floats.JumpThreading.diff
 // EMIT_MIR jump_threading.bitwise_not.JumpThreading.diff
 // EMIT_MIR jump_threading.logical_not.JumpThreading.diff
+// EMIT_MIR jump_threading.chained_conditions.JumpThreading.diff
diff --git a/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-abort.diff
index 7de3592..6e0cf5e 100644
--- a/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-abort.diff
+++ b/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-abort.diff
@@ -57,13 +57,13 @@
           _2 = ControlFlow::<usize, i32>::Continue(move _5);
           StorageDead(_5);
           StorageDead(_4);
-          goto -> bb4;
+-         goto -> bb4;
++         goto -> bb9;
       }
   
       bb4: {
           _8 = discriminant(_2);
--         switchInt(move _8) -> [0: bb6, 1: bb5, otherwise: bb1];
-+         goto -> bb6;
+          switchInt(move _8) -> [0: bb6, 1: bb5, otherwise: bb1];
       }
   
       bb5: {
@@ -93,6 +93,11 @@
 +     bb8: {
 +         _8 = discriminant(_2);
 +         goto -> bb5;
++     }
++ 
++     bb9: {
++         _8 = discriminant(_2);
++         goto -> bb6;
       }
   }
   
diff --git a/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-unwind.diff
index 7de3592..6e0cf5e 100644
--- a/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-unwind.diff
+++ b/tests/mir-opt/jump_threading.too_complex.JumpThreading.panic-unwind.diff
@@ -57,13 +57,13 @@
           _2 = ControlFlow::<usize, i32>::Continue(move _5);
           StorageDead(_5);
           StorageDead(_4);
-          goto -> bb4;
+-         goto -> bb4;
++         goto -> bb9;
       }
   
       bb4: {
           _8 = discriminant(_2);
--         switchInt(move _8) -> [0: bb6, 1: bb5, otherwise: bb1];
-+         goto -> bb6;
+          switchInt(move _8) -> [0: bb6, 1: bb5, otherwise: bb1];
       }
   
       bb5: {
@@ -93,6 +93,11 @@
 +     bb8: {
 +         _8 = discriminant(_2);
 +         goto -> bb5;
++     }
++ 
++     bb9: {
++         _8 = discriminant(_2);
++         goto -> bb6;
       }
   }
   
diff --git a/tests/mir-opt/jump_threading.two_reads.JumpThreading.panic-abort.diff b/tests/mir-opt/jump_threading.two_reads.JumpThreading.panic-abort.diff
new file mode 100644
index 0000000..090e9d3
--- /dev/null
+++ b/tests/mir-opt/jump_threading.two_reads.JumpThreading.panic-abort.diff
@@ -0,0 +1,56 @@
+- // MIR for `two_reads` before JumpThreading
++ // MIR for `two_reads` after JumpThreading
+  
+  fn two_reads() -> i32 {
+      let mut _0: i32;
+      let _1: i32;
+      let mut _4: bool;
+      let mut _5: i32;
+      scope 1 {
+          debug a => _1;
+          let _2: i32;
+          scope 2 {
+              debug b => _2;
+              let _3: i32;
+              scope 3 {
+                  debug c => _3;
+              }
+          }
+      }
+  
+      bb0: {
+          StorageLive(_1);
+          _1 = const 2_i32;
+          StorageLive(_2);
+          _2 = copy _1;
+          StorageLive(_3);
+          _3 = copy _1;
+          StorageLive(_4);
+          StorageLive(_5);
+          _5 = copy _3;
+          _4 = Eq(move _5, const 2_i32);
+-         switchInt(move _4) -> [0: bb2, otherwise: bb1];
++         goto -> bb1;
+      }
+  
+      bb1: {
+          StorageDead(_5);
+          _0 = const 0_i32;
+          goto -> bb3;
+      }
+  
+      bb2: {
+          StorageDead(_5);
+          _0 = const 1_i32;
+          goto -> bb3;
+      }
+  
+      bb3: {
+          StorageDead(_4);
+          StorageDead(_3);
+          StorageDead(_2);
+          StorageDead(_1);
+          return;
+      }
+  }
+  
diff --git a/tests/mir-opt/jump_threading.two_reads.JumpThreading.panic-unwind.diff b/tests/mir-opt/jump_threading.two_reads.JumpThreading.panic-unwind.diff
new file mode 100644
index 0000000..090e9d3
--- /dev/null
+++ b/tests/mir-opt/jump_threading.two_reads.JumpThreading.panic-unwind.diff
@@ -0,0 +1,56 @@
+- // MIR for `two_reads` before JumpThreading
++ // MIR for `two_reads` after JumpThreading
+  
+  fn two_reads() -> i32 {
+      let mut _0: i32;
+      let _1: i32;
+      let mut _4: bool;
+      let mut _5: i32;
+      scope 1 {
+          debug a => _1;
+          let _2: i32;
+          scope 2 {
+              debug b => _2;
+              let _3: i32;
+              scope 3 {
+                  debug c => _3;
+              }
+          }
+      }
+  
+      bb0: {
+          StorageLive(_1);
+          _1 = const 2_i32;
+          StorageLive(_2);
+          _2 = copy _1;
+          StorageLive(_3);
+          _3 = copy _1;
+          StorageLive(_4);
+          StorageLive(_5);
+          _5 = copy _3;
+          _4 = Eq(move _5, const 2_i32);
+-         switchInt(move _4) -> [0: bb2, otherwise: bb1];
++         goto -> bb1;
+      }
+  
+      bb1: {
+          StorageDead(_5);
+          _0 = const 0_i32;
+          goto -> bb3;
+      }
+  
+      bb2: {
+          StorageDead(_5);
+          _0 = const 1_i32;
+          goto -> bb3;
+      }
+  
+      bb3: {
+          StorageDead(_4);
+          StorageDead(_3);
+          StorageDead(_2);
+          StorageDead(_1);
+          return;
+      }
+  }
+  
diff --git a/tests/mir-opt/pre-codegen/derived_ord.demo_le.PreCodegen.after.mir b/tests/mir-opt/pre-codegen/derived_ord.demo_le.PreCodegen.after.mir
index e235fa3..578aff4 100644
--- a/tests/mir-opt/pre-codegen/derived_ord.demo_le.PreCodegen.after.mir
+++ b/tests/mir-opt/pre-codegen/derived_ord.demo_le.PreCodegen.after.mir
@@ -7,12 +7,11 @@
     scope 1 (inlined <MultiField as PartialOrd>::le) {
         let mut _6: std::option::Option<std::cmp::Ordering>;
         scope 2 (inlined Option::<std::cmp::Ordering>::is_some_and::<fn(std::cmp::Ordering) -> bool {std::cmp::Ordering::is_le}>) {
-            let mut _11: isize;
-            let _12: std::cmp::Ordering;
+            let _11: std::cmp::Ordering;
             scope 3 {
                 scope 4 (inlined <fn(std::cmp::Ordering) -> bool {std::cmp::Ordering::is_le} as FnOnce<(std::cmp::Ordering,)>>::call_once - shim(fn(std::cmp::Ordering) -> bool {std::cmp::Ordering::is_le})) {
                     scope 5 (inlined std::cmp::Ordering::is_le) {
-                        let mut _13: i8;
+                        let mut _12: i8;
                         scope 6 (inlined std::cmp::Ordering::as_raw) {
                         }
                     }
@@ -37,7 +36,7 @@
     }
 
     bb0: {
-        StorageLive(_12);
+        StorageLive(_11);
         StorageLive(_6);
         StorageLive(_5);
         StorageLive(_7);
@@ -64,42 +63,19 @@
         StorageDead(_8);
         _6 = Option::<std::cmp::Ordering>::Some(move _10);
         StorageDead(_10);
-        StorageDead(_7);
-        StorageDead(_5);
-        StorageLive(_11);
-        goto -> bb4;
+        goto -> bb2;
     }
 
     bb2: {
         StorageDead(_7);
         StorageDead(_5);
-        StorageLive(_11);
-        _11 = discriminant(_6);
-        switchInt(move _11) -> [0: bb3, 1: bb4, otherwise: bb6];
-    }
-
-    bb3: {
-        _0 = const false;
-        goto -> bb5;
-    }
-
-    bb4: {
-        _12 = move ((_6 as Some).0: std::cmp::Ordering);
-        StorageLive(_13);
-        _13 = discriminant(_12);
-        _0 = Le(move _13, const 0_i8);
-        StorageDead(_13);
-        goto -> bb5;
-    }
-
-    bb5: {
-        StorageDead(_11);
-        StorageDead(_6);
+        _11 = move ((_6 as Some).0: std::cmp::Ordering);
+        StorageLive(_12);
+        _12 = discriminant(_11);
+        _0 = Le(move _12, const 0_i8);
         StorageDead(_12);
+        StorageDead(_6);
+        StorageDead(_11);
         return;
     }
-
-    bb6: {
-        unreachable;
-    }
 }
diff --git a/tests/mir-opt/pre-codegen/derived_ord.rs b/tests/mir-opt/pre-codegen/derived_ord.rs
index 823e0f6d..a67756c 100644
--- a/tests/mir-opt/pre-codegen/derived_ord.rs
+++ b/tests/mir-opt/pre-codegen/derived_ord.rs
@@ -13,22 +13,23 @@ pub fn demo_le(a: &MultiField, b: &MultiField) -> bool {
     // CHECK: inlined{{.+}}is_some_and
     // CHECK: inlined <MultiField as PartialOrd>::partial_cmp
 
-    // CHECK: [[A0:_[0-9]+]] = copy ((*_1).0: char);
-    // CHECK: [[B0:_[0-9]+]] = copy ((*_2).0: char);
-    // CHECK: Cmp(move [[A0]], move [[B0]]);
+    // CHECK: bb0: {
+    // CHECK:     [[A0:_[0-9]+]] = copy ((*_1).0: char);
+    // CHECK:     [[B0:_[0-9]+]] = copy ((*_2).0: char);
+    // CHECK:     Cmp(move [[A0]], move [[B0]]);
+    // CHECK:     [[D0:_[0-9]+]] = discriminant({{.+}});
+    // CHECK:     switchInt(move [[D0]]) -> [0: bb1, otherwise: bb2];
 
-    // CHECK: [[D0:_[0-9]+]] = discriminant({{.+}});
-    // CHECK: switchInt(move [[D0]]) -> [0: bb{{[0-9]+}}, otherwise: bb{{[0-9]+}}];
+    // CHECK: bb1: {
+    // CHECK:     [[A1:_[0-9]+]] = copy ((*_1).1: i16);
+    // CHECK:     [[B1:_[0-9]+]] = copy ((*_2).1: i16);
+    // CHECK:     Cmp(move [[A1]], move [[B1]]);
+    // CHECK:     goto -> bb2;
 
-    // CHECK: [[A1:_[0-9]+]] = copy ((*_1).1: i16);
-    // CHECK: [[B1:_[0-9]+]] = copy ((*_2).1: i16);
-    // CHECK: Cmp(move [[A1]], move [[B1]]);
-
-    // CHECK: [[D1:_[0-9]+]] = discriminant({{.+}});
-    // CHECK: switchInt(move [[D1]]) -> [0: bb{{[0-9]+}}, 1: bb{{[0-9]+}}, otherwise: bb{{[0-9]+}}];
-
-    // CHECK: [[D2:_[0-9]+]] = discriminant({{.+}});
-    // CHECK: _0 = Le(move [[D2]], const 0_i8);
+    // CHECK: bb2: {
+    // CHECK:     [[D2:_[0-9]+]] = discriminant({{.+}});
+    // CHECK:     _0 = Le(move [[D2]], const 0_i8);
+    // CHECK:     return;
     *a <= *b
 }
 
diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir
index 104987b..f72611b 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir
@@ -211,12 +211,6 @@
 
     bb7: {
         StorageDead(_16);
-        StorageDead(_22);
-        StorageDead(_13);
-        StorageDead(_20);
-        StorageDead(_19);
-        StorageDead(_12);
-        StorageDead(_11);
         goto -> bb10;
     }
 
@@ -226,16 +220,16 @@
     }
 
     bb9: {
+        goto -> bb10;
+    }
+
+    bb10: {
         StorageDead(_22);
         StorageDead(_13);
         StorageDead(_20);
         StorageDead(_19);
         StorageDead(_12);
         StorageDead(_11);
-        goto -> bb10;
-    }
-
-    bb10: {
         StorageDead(_23);
         StorageDead(_26);
         StorageDead(_25);
diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir
index 4d0e354..b210efb 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir
@@ -173,12 +173,6 @@
 
     bb7: {
         StorageDead(_15);
-        StorageDead(_21);
-        StorageDead(_12);
-        StorageDead(_19);
-        StorageDead(_18);
-        StorageDead(_11);
-        StorageDead(_10);
         goto -> bb10;
     }
 
@@ -188,16 +182,16 @@
     }
 
     bb9: {
+        goto -> bb10;
+    }
+
+    bb10: {
         StorageDead(_21);
         StorageDead(_12);
         StorageDead(_19);
         StorageDead(_18);
         StorageDead(_11);
         StorageDead(_10);
-        goto -> bb10;
-    }
-
-    bb10: {
         StorageDead(_22);
         drop(_2) -> [return: bb11, unwind unreachable];
     }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir
index 2b5d8c2..ab6e2bf 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir
@@ -173,12 +173,6 @@
 
     bb7: {
         StorageDead(_15);
-        StorageDead(_21);
-        StorageDead(_12);
-        StorageDead(_19);
-        StorageDead(_18);
-        StorageDead(_11);
-        StorageDead(_10);
         goto -> bb10;
     }
 
@@ -188,16 +182,16 @@
     }
 
     bb9: {
+        goto -> bb10;
+    }
+
+    bb10: {
         StorageDead(_21);
         StorageDead(_12);
         StorageDead(_19);
         StorageDead(_18);
         StorageDead(_11);
         StorageDead(_10);
-        goto -> bb10;
-    }
-
-    bb10: {
         StorageDead(_22);
         drop(_2) -> [return: bb11, unwind continue];
     }
diff --git a/tests/mir-opt/separate_const_switch.identity.JumpThreading.diff b/tests/mir-opt/separate_const_switch.identity.JumpThreading.diff
index ce9d812..34f451f 100644
--- a/tests/mir-opt/separate_const_switch.identity.JumpThreading.diff
+++ b/tests/mir-opt/separate_const_switch.identity.JumpThreading.diff
@@ -69,8 +69,7 @@
           StorageDead(_7);
           StorageDead(_6);
           _3 = discriminant(_2);
--         switchInt(move _3) -> [0: bb2, 1: bb3, otherwise: bb1];
-+         goto -> bb2;
+          switchInt(move _3) -> [0: bb2, 1: bb3, otherwise: bb1];
       }
   
       bb5: {
@@ -86,7 +85,8 @@
       bb6: {
           _7 = copy ((_1 as Ok).0: i32);
           _2 = ControlFlow::<Result<Infallible, i32>, i32>::Continue(copy _7);
-          goto -> bb4;
+-         goto -> bb4;
++         goto -> bb8;
 +     }
 + 
 +     bb7: {
@@ -95,6 +95,14 @@
 +         StorageDead(_6);
 +         _3 = discriminant(_2);
 +         goto -> bb3;
++     }
++ 
++     bb8: {
++         StorageDead(_8);
++         StorageDead(_7);
++         StorageDead(_6);
++         _3 = discriminant(_2);
++         goto -> bb2;
       }
   }
   
diff --git a/tests/mir-opt/separate_const_switch.too_complex.JumpThreading.diff b/tests/mir-opt/separate_const_switch.too_complex.JumpThreading.diff
index c88c63e..794c28a 100644
--- a/tests/mir-opt/separate_const_switch.too_complex.JumpThreading.diff
+++ b/tests/mir-opt/separate_const_switch.too_complex.JumpThreading.diff
@@ -44,13 +44,13 @@
       bb3: {
           _4 = copy ((_1 as Ok).0: i32);
           _2 = ControlFlow::<usize, i32>::Continue(copy _4);
-          goto -> bb4;
+-         goto -> bb4;
++         goto -> bb9;
       }
   
       bb4: {
           _6 = discriminant(_2);
--         switchInt(move _6) -> [0: bb6, 1: bb5, otherwise: bb1];
-+         goto -> bb6;
+          switchInt(move _6) -> [0: bb6, 1: bb5, otherwise: bb1];
       }
   
       bb5: {
@@ -75,6 +75,11 @@
 +     bb8: {
 +         _6 = discriminant(_2);
 +         goto -> bb5;
++     }
++ 
++     bb9: {
++         _6 = discriminant(_2);
++         goto -> bb6;
       }
   }
   
diff --git a/tests/ui/README.md b/tests/ui/README.md
index 11003bb..344b0b2 100644
--- a/tests/ui/README.md
+++ b/tests/ui/README.md
@@ -113,12 +113,6 @@
 
 Tests for automatic referencing and dereferencing behavior, such as automatically adding reference operations (`&` or `&mut`) to make a value match a method's receiver type. Sometimes abbreviated as "auto-ref" or "auto-deref".
 
-## `tests/ui/auxiliary/`: Auxiliary files for tests directly under `tests/ui`.
-
-This top-level `auxiliary` subdirectory contains support files for tests immediately under `tests/ui/`.
-
-**FIXME(#133895)**: tests immediately under `tests/ui/` should be rehomed to more suitable subdirectories, after which this subdirectory can be removed.
-
 ## `tests/ui/backtrace/`: Backtraces
 
 Runtime panics and error handling generate backtraces to assist in debugging and diagnostics.
@@ -542,12 +536,6 @@
 
 Accompanies `tests/ui/error-codes/`, exercises the `--explain` cli flag.
 
-## `tests/ui/explicit/`: Errors involving the concept of "explicit"
-
-This category contains three tests: two which are about the specific error `explicit use of destructor method`, and one which is about explicit annotation of lifetimes: https://doc.rust-lang.org/stable/rust-by-example/scope/lifetime/explicit.html.
-
-**FIXME**: Rehome the two tests about the destructor method with `drop`-related categories, and rehome the last test with a category related to lifetimes.
-
 ## `tests/ui/explicit-tail-calls/`
 
 Exercises `#![feature(explicit_tail_calls)]` and the `become` keyword. See [Explicit Tail Calls #3407](https://github.com/rust-lang/rfcs/pull/3407).
@@ -733,10 +721,6 @@
 
 See [Tracking issue for `-Z instrument-xray` #102921](https://github.com/rust-lang/rust/issues/102921).
 
-## `tests/ui/interior-mutability/`
-
-**FIXME**: contains a single test, probably better rehomed.
-
 ## `tests/ui/internal/`
 
 Tests for `internal_unstable` and the attribute header `#![feature(allow_internal_unstable)]`, which lets compiler developers mark features as internal to the compiler, and unstable for standard library use.
@@ -759,16 +743,6 @@
 
 Tests for checking that invalid usage of compiler flags are rejected.
 
-## `tests/ui/invalid-module-declaration/`
-
-**FIXME**: Consider merging into module/resolve directories.
-
-## `tests/ui/invalid-self-argument/`: `self` as a function argument incorrectly
-
-Tests with erroneous ways of using `self`, such as having it not be the first argument, or using it in a non-associated function (no `impl` or `trait`).
-
-**FIXME**: Maybe merge with `ui/self`.
-
 ## `tests/ui/io-checks/`
 
 Contains a single test. The test tries to output a file into an invalid directory with `-o`, then checks that the result is an error, not an internal compiler error.
diff --git a/tests/ui/cmse-nonsecure/cmse-nonsecure-call/infer.rs b/tests/ui/cmse-nonsecure/cmse-nonsecure-call/infer.rs
new file mode 100644
index 0000000..3452dc2
--- /dev/null
+++ b/tests/ui/cmse-nonsecure/cmse-nonsecure-call/infer.rs
@@ -0,0 +1,34 @@
+//@ add-minicore
+//@ compile-flags: --target thumbv8m.main-none-eabi --crate-type lib
+//@ needs-llvm-components: arm
+#![feature(abi_cmse_nonsecure_call, no_core, lang_items)]
+#![no_core]
+
+// Infer variables cause panics in layout generation, so argument and return types are checked
+// for infer vars, and `LayoutError::Unknown` is emitted if any are found.
+//
+// See https://github.com/rust-lang/rust/issues/130104.
+
+extern crate minicore;
+use minicore::*;
+
+fn infer_1() {
+    let _ = mem::transmute::<fn() -> _, extern "cmse-nonsecure-call" fn() -> _>;
+    //~^ ERROR type annotations needed
+}
+
+fn infer_2() {
+    let _ = mem::transmute::<fn() -> (i32, _), extern "cmse-nonsecure-call" fn() -> (i32, _)>;
+    //~^ ERROR type annotations needed
+}
+
+fn infer_3() {
+    let _ = mem::transmute::<fn(_: _) -> (), extern "cmse-nonsecure-call" fn(_: _) -> ()>;
+    //~^ ERROR type annotations needed
+}
+
+fn infer_4() {
+    let _ =
+        mem::transmute::<fn(_: (i32, _)) -> (), extern "cmse-nonsecure-call" fn(_: (i32, _)) -> ()>;
+    //~^ ERROR type annotations needed
+}
diff --git a/tests/ui/cmse-nonsecure/cmse-nonsecure-call/infer.stderr b/tests/ui/cmse-nonsecure/cmse-nonsecure-call/infer.stderr
new file mode 100644
index 0000000..aab314c
--- /dev/null
+++ b/tests/ui/cmse-nonsecure/cmse-nonsecure-call/infer.stderr
@@ -0,0 +1,27 @@
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:16:13
+   |
+LL |     let _ = mem::transmute::<fn() -> _, extern "cmse-nonsecure-call" fn() -> _>;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:21:13
+   |
+LL |     let _ = mem::transmute::<fn() -> (i32, _), extern "cmse-nonsecure-call" fn() -> (i32, _)>;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:26:13
+   |
+LL |     let _ = mem::transmute::<fn(_: _) -> (), extern "cmse-nonsecure-call" fn(_: _) -> ()>;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:32:9
+   |
+LL |         mem::transmute::<fn(_: (i32, _)) -> (), extern "cmse-nonsecure-call" fn(_: (i32, _)) -> ()>;
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0282`.
diff --git a/tests/ui/cmse-nonsecure/cmse-nonsecure-entry/infer.rs b/tests/ui/cmse-nonsecure/cmse-nonsecure-entry/infer.rs
new file mode 100644
index 0000000..75a08ff
--- /dev/null
+++ b/tests/ui/cmse-nonsecure/cmse-nonsecure-entry/infer.rs
@@ -0,0 +1,36 @@
+//@ add-minicore
+//@ compile-flags: --target thumbv8m.main-none-eabi --crate-type lib
+//@ needs-llvm-components: arm
+#![feature(cmse_nonsecure_entry, no_core, lang_items)]
+#![no_core]
+
+// Infer variables cause panics in layout generation, so argument and return types are checked
+// for infer vars, and `LayoutError::Unknown` is emitted if any are found.
+//
+// See https://github.com/rust-lang/rust/issues/130104.
+
+extern crate minicore;
+use minicore::*;
+
+fn infer_1() {
+    let _ = mem::transmute::<fn() -> _, extern "cmse-nonsecure-entry" fn() -> _>;
+    //~^ ERROR type annotations needed
+}
+
+fn infer_2() {
+    let _ = mem::transmute::<fn() -> (i32, _), extern "cmse-nonsecure-entry" fn() -> (i32, _)>;
+    //~^ ERROR type annotations needed
+}
+
+fn infer_3() {
+    let _ = mem::transmute::<fn(_: _) -> (), extern "cmse-nonsecure-entry" fn(_: _) -> ()>;
+    //~^ ERROR type annotations needed
+}
+
+fn infer_4() {
+    let _ = mem::transmute::<
+        //~^ ERROR type annotations needed
+        fn(_: (i32, _)) -> (),
+        extern "cmse-nonsecure-entry" fn(_: (i32, _)) -> (),
+    >;
+}
diff --git a/tests/ui/cmse-nonsecure/cmse-nonsecure-entry/infer.stderr b/tests/ui/cmse-nonsecure/cmse-nonsecure-entry/infer.stderr
new file mode 100644
index 0000000..4243771
--- /dev/null
+++ b/tests/ui/cmse-nonsecure/cmse-nonsecure-entry/infer.stderr
@@ -0,0 +1,32 @@
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:16:13
+   |
+LL |     let _ = mem::transmute::<fn() -> _, extern "cmse-nonsecure-entry" fn() -> _>;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:21:13
+   |
+LL |     let _ = mem::transmute::<fn() -> (i32, _), extern "cmse-nonsecure-entry" fn() -> (i32, _)>;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:26:13
+   |
+LL |     let _ = mem::transmute::<fn(_: _) -> (), extern "cmse-nonsecure-entry" fn(_: _) -> ()>;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error[E0282]: type annotations needed
+  --> $DIR/infer.rs:31:13
+   |
+LL |       let _ = mem::transmute::<
+   |  _____________^
+LL | |
+LL | |         fn(_: (i32, _)) -> (),
+LL | |         extern "cmse-nonsecure-entry" fn(_: (i32, _)) -> (),
+LL | |     >;
+   | |_____^ cannot infer type of the type parameter `Src` declared on the function `transmute`
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0282`.
diff --git a/tests/ui/consts/const-result-no-expect-suggestion.rs b/tests/ui/consts/const-result-no-expect-suggestion.rs
new file mode 100644
index 0000000..cd725d9
--- /dev/null
+++ b/tests/ui/consts/const-result-no-expect-suggestion.rs
@@ -0,0 +1,15 @@
+const fn f(value: u32) -> Result<u32, ()> {
+    Ok(value)
+}
+
+const TEST: u32 = f(2);
+//~^ ERROR: mismatched types
+
+const fn g() -> Result<String, ()> {
+    Ok(String::new())
+}
+
+const TEST2: usize = g().len();
+//~^ ERROR: no method named `len` found for enum `Result<T, E>`
+
+fn main() {}
diff --git a/tests/ui/consts/const-result-no-expect-suggestion.stderr b/tests/ui/consts/const-result-no-expect-suggestion.stderr
new file mode 100644
index 0000000..70aa306
--- /dev/null
+++ b/tests/ui/consts/const-result-no-expect-suggestion.stderr
@@ -0,0 +1,24 @@
+error[E0308]: mismatched types
+  --> $DIR/const-result-no-expect-suggestion.rs:5:19
+   |
+LL | const TEST: u32 = f(2);
+   |                   ^^^^ expected `u32`, found `Result<u32, ()>`
+   |
+   = note: expected type `u32`
+              found enum `Result<u32, ()>`
+
+error[E0599]: no method named `len` found for enum `Result<T, E>` in the current scope
+  --> $DIR/const-result-no-expect-suggestion.rs:12:26
+   |
+LL | const TEST2: usize = g().len();
+   |                          ^^^
+   |
+note: the method `len` exists on the type `String`
+  --> $SRC_DIR/alloc/src/string.rs:LL:COL
+help: there is a method `le` with a similar name, but with different arguments
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0308, E0599.
+For more information about an error, try `rustc --explain E0308`.
diff --git a/tests/ui/delegation/unreachable-label-ice-148889.rs b/tests/ui/delegation/unreachable-label-ice-148889.rs
new file mode 100644
index 0000000..e5f9fa1
--- /dev/null
+++ b/tests/ui/delegation/unreachable-label-ice-148889.rs
@@ -0,0 +1,18 @@
+#![allow(incomplete_features)]
+#![feature(fn_delegation)]
+
+trait Trait {
+    fn static_method2(x: i32, y: i32) -> i32 {
+        x + y
+    }
+}
+
+struct S;
+impl Trait for S {}
+
+pub fn main() {
+    'foo: loop {
+        reuse <S as Trait>::static_method2 { loop { break 'foo; } }
+        //~^ ERROR use of unreachable label `'foo`
+    }
+}
diff --git a/tests/ui/delegation/unreachable-label-ice-148889.stderr b/tests/ui/delegation/unreachable-label-ice-148889.stderr
new file mode 100644
index 0000000..2141962
--- /dev/null
+++ b/tests/ui/delegation/unreachable-label-ice-148889.stderr
@@ -0,0 +1,13 @@
+error[E0767]: use of unreachable label `'foo`
+  --> $DIR/unreachable-label-ice-148889.rs:15:59
+   |
+LL |     'foo: loop {
+   |     ---- unreachable label defined here
+LL |         reuse <S as Trait>::static_method2 { loop { break 'foo; } }
+   |                                                           ^^^^ unreachable label `'foo`
+   |
+   = note: labels are unreachable through functions, closures, async blocks and modules
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0767`.
diff --git a/tests/ui/explicit/explicit-call-to-dtor.fixed b/tests/ui/drop/explicit-call-to-dtor.fixed
similarity index 71%
rename from tests/ui/explicit/explicit-call-to-dtor.fixed
rename to tests/ui/drop/explicit-call-to-dtor.fixed
index 4c4142c..167f557 100644
--- a/tests/ui/explicit/explicit-call-to-dtor.fixed
+++ b/tests/ui/drop/explicit-call-to-dtor.fixed
@@ -1,6 +1,6 @@
 //@ run-rustfix
 struct Foo {
-    x: isize
+    x: isize,
 }
 
 impl Drop for Foo {
@@ -12,5 +12,5 @@
 fn main() {
     let x = Foo { x: 3 };
     println!("{}", x.x);
-    drop(x);   //~ ERROR explicit use of destructor method
+    drop(x); //~ ERROR explicit use of destructor method
 }
diff --git a/tests/ui/explicit/explicit-call-to-dtor.fixed b/tests/ui/drop/explicit-call-to-dtor.rs
similarity index 70%
copy from tests/ui/explicit/explicit-call-to-dtor.fixed
copy to tests/ui/drop/explicit-call-to-dtor.rs
index 4c4142c..2c4e013 100644
--- a/tests/ui/explicit/explicit-call-to-dtor.fixed
+++ b/tests/ui/drop/explicit-call-to-dtor.rs
@@ -1,6 +1,6 @@
 //@ run-rustfix
 struct Foo {
-    x: isize
+    x: isize,
 }
 
 impl Drop for Foo {
@@ -12,5 +12,5 @@ fn drop(&mut self) {
 fn main() {
     let x = Foo { x: 3 };
     println!("{}", x.x);
-    drop(x);   //~ ERROR explicit use of destructor method
+    x.drop(); //~ ERROR explicit use of destructor method
 }
diff --git a/tests/ui/explicit/explicit-call-to-dtor.stderr b/tests/ui/drop/explicit-call-to-dtor.stderr
similarity index 100%
rename from tests/ui/explicit/explicit-call-to-dtor.stderr
rename to tests/ui/drop/explicit-call-to-dtor.stderr
diff --git a/tests/ui/explicit/explicit-call-to-supertrait-dtor.fixed b/tests/ui/drop/explicit-call-to-supertrait-dtor.fixed
similarity index 80%
rename from tests/ui/explicit/explicit-call-to-supertrait-dtor.fixed
rename to tests/ui/drop/explicit-call-to-supertrait-dtor.fixed
index 57cb858..1526f7b 100644
--- a/tests/ui/explicit/explicit-call-to-supertrait-dtor.fixed
+++ b/tests/ui/drop/explicit-call-to-supertrait-dtor.fixed
@@ -4,7 +4,7 @@
 #![allow(dropping_references)]
 
 struct Foo {
-    x: isize
+    x: isize,
 }
 
 #[allow(drop_bounds)]
@@ -20,7 +20,7 @@
 
 impl Bar for Foo {
     fn blah(&self) {
-        drop(self);    //~ ERROR explicit use of destructor method
+        drop(self); //~ ERROR explicit use of destructor method
     }
 }
 
diff --git a/tests/ui/explicit/explicit-call-to-supertrait-dtor.fixed b/tests/ui/drop/explicit-call-to-supertrait-dtor.rs
similarity index 80%
copy from tests/ui/explicit/explicit-call-to-supertrait-dtor.fixed
copy to tests/ui/drop/explicit-call-to-supertrait-dtor.rs
index 57cb858..2de3d00 100644
--- a/tests/ui/explicit/explicit-call-to-supertrait-dtor.fixed
+++ b/tests/ui/drop/explicit-call-to-supertrait-dtor.rs
@@ -4,7 +4,7 @@
 #![allow(dropping_references)]
 
 struct Foo {
-    x: isize
+    x: isize,
 }
 
 #[allow(drop_bounds)]
@@ -20,7 +20,7 @@ fn drop(&mut self) {
 
 impl Bar for Foo {
     fn blah(&self) {
-        drop(self);    //~ ERROR explicit use of destructor method
+        self.drop(); //~ ERROR explicit use of destructor method
     }
 }
 
diff --git a/tests/ui/explicit/explicit-call-to-supertrait-dtor.stderr b/tests/ui/drop/explicit-call-to-supertrait-dtor.stderr
similarity index 100%
rename from tests/ui/explicit/explicit-call-to-supertrait-dtor.stderr
rename to tests/ui/drop/explicit-call-to-supertrait-dtor.stderr
diff --git a/tests/ui/enum-discriminant/invalid-niche-discriminant.normal.stderr b/tests/ui/enum-discriminant/invalid-niche-discriminant.normal.stderr
new file mode 100644
index 0000000..9c66c17
--- /dev/null
+++ b/tests/ui/enum-discriminant/invalid-niche-discriminant.normal.stderr
@@ -0,0 +1,35 @@
+error[E0732]: `#[repr(inttype)]` must be specified for enums with explicit discriminants and non-unit variants
+  --> $DIR/invalid-niche-discriminant.rs:11:1
+   |
+LL |   enum E {
+   |   ^^^^^^
+...
+LL |       S0 {
+   |       -- non-unit discriminant declared here
+...
+LL |       Bar = {
+   |  ___________-
+LL | |         let x = 1;
+LL | |         3
+LL | |     },
+   | |_____- explicit discriminant specified here
+
+error[E0599]: no variant named `S1` found for enum `E`
+  --> $DIR/invalid-niche-discriminant.rs:23:18
+   |
+LL | enum E {
+   | ------ variant `S1` not found here
+...
+LL | static C: E = E::S1 { u: 23 };
+   |                  ^^
+   |
+help: there is a variant with a similar name
+   |
+LL - static C: E = E::S1 { u: 23 };
+LL + static C: E = E::S0 { u: 23 };
+   |
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0599, E0732.
+For more information about an error, try `rustc --explain E0599`.
diff --git a/tests/ui/enum-discriminant/invalid-niche-discriminant.rs b/tests/ui/enum-discriminant/invalid-niche-discriminant.rs
new file mode 100644
index 0000000..f70f7d1
--- /dev/null
+++ b/tests/ui/enum-discriminant/invalid-niche-discriminant.rs
@@ -0,0 +1,25 @@
+//@ needs-rustc-debug-assertions
+//@ revisions: normal with_delayed
+//@ [with_delayed] compile-flags: -Z eagerly-emit-delayed-bugs
+
+#![crate_type = "lib"]
+
+// Repro for <https://github.com/rust-lang/rust/issues/144501>,
+// which ICEd because the calculated layout is invalid; we needn't care about that,
+// since the discriminant was already invalid.
+
+enum E {
+//~^ ERROR must be specified
+//[with_delayed]~| ERROR variant 1 has discriminant 3
+    S0 {
+        s: String,
+    },
+    Bar = {
+        let x = 1;
+        3
+    },
+}
+
+static C: E = E::S1 { u: 23 };
+//~^ ERROR no variant named
+//[with_delayed]~| ERROR but no error emitted
diff --git a/tests/ui/enum-discriminant/invalid-niche-discriminant.with_delayed.stderr b/tests/ui/enum-discriminant/invalid-niche-discriminant.with_delayed.stderr
new file mode 100644
index 0000000..20f1186
--- /dev/null
+++ b/tests/ui/enum-discriminant/invalid-niche-discriminant.with_delayed.stderr
@@ -0,0 +1,47 @@
+error[E0732]: `#[repr(inttype)]` must be specified for enums with explicit discriminants and non-unit variants
+  --> $DIR/invalid-niche-discriminant.rs:11:1
+   |
+LL |   enum E {
+   |   ^^^^^^
+...
+LL |       S0 {
+   |       -- non-unit discriminant declared here
+...
+LL |       Bar = {
+   |  ___________-
+LL | |         let x = 1;
+LL | |         3
+LL | |     },
+   | |_____- explicit discriminant specified here
+
+error: variant 1 has discriminant 3 in niche-encoded type
+  --> $DIR/invalid-niche-discriminant.rs:11:1
+   |
+LL | enum E {
+   | ^^^^^^
+
+error[E0599]: no variant named `S1` found for enum `E`
+  --> $DIR/invalid-niche-discriminant.rs:23:18
+   |
+LL | enum E {
+   | ------ variant `S1` not found here
+...
+LL | static C: E = E::S1 { u: 23 };
+   |                  ^^
+   |
+help: there is a variant with a similar name
+   |
+LL - static C: E = E::S1 { u: 23 };
+LL + static C: E = E::S0 { u: 23 };
+   |
+
+error: `Res::Err` but no error emitted
+  --> $DIR/invalid-niche-discriminant.rs:23:15
+   |
+LL | static C: E = E::S1 { u: 23 };
+   |               ^^^^^
+
+error: aborting due to 4 previous errors
+
+Some errors have detailed explanations: E0599, E0732.
+For more information about an error, try `rustc --explain E0599`.
diff --git a/tests/ui/explain/basic.stdout b/tests/ui/explain/basic.stdout
index ef1d866..6377768 100644
--- a/tests/ui/explain/basic.stdout
+++ b/tests/ui/explain/basic.stdout
@@ -56,10 +56,10 @@
   and do the cast in the fn body (the preferred option)
 - cast the fn item of a fn pointer before calling transmute, as shown here:
 
-    ```
-    let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_));
-    let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
-    ```
+```
+let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_));
+let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
+```
 
 The same applies to transmutes to `*mut fn()`, which were observed in practice.
 Note though that use of this type is generally incorrect.
diff --git a/tests/ui/explicit/explicit-call-to-dtor.rs b/tests/ui/explicit/explicit-call-to-dtor.rs
deleted file mode 100644
index 262dde5..0000000
--- a/tests/ui/explicit/explicit-call-to-dtor.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-//@ run-rustfix
-struct Foo {
-    x: isize
-}
-
-impl Drop for Foo {
-    fn drop(&mut self) {
-        println!("kaboom");
-    }
-}
-
-fn main() {
-    let x = Foo { x: 3 };
-    println!("{}", x.x);
-    x.drop();   //~ ERROR explicit use of destructor method
-}
diff --git a/tests/ui/explicit/explicit-call-to-supertrait-dtor.rs b/tests/ui/explicit/explicit-call-to-supertrait-dtor.rs
deleted file mode 100644
index bb29e49..0000000
--- a/tests/ui/explicit/explicit-call-to-supertrait-dtor.rs
+++ /dev/null
@@ -1,30 +0,0 @@
-//@ run-rustfix
-
-#![allow(dead_code)]
-#![allow(dropping_references)]
-
-struct Foo {
-    x: isize
-}
-
-#[allow(drop_bounds)]
-trait Bar: Drop {
-    fn blah(&self);
-}
-
-impl Drop for Foo {
-    fn drop(&mut self) {
-        println!("kaboom");
-    }
-}
-
-impl Bar for Foo {
-    fn blah(&self) {
-        self.drop();    //~ ERROR explicit use of destructor method
-    }
-}
-
-fn main() {
-    let x = Foo { x: 3 };
-    println!("{}", x.x);
-}
diff --git a/tests/ui/explicit/explicit-self-lifetime-mismatch.rs b/tests/ui/explicit/explicit-self-lifetime-mismatch.rs
deleted file mode 100644
index aa5e352..0000000
--- a/tests/ui/explicit/explicit-self-lifetime-mismatch.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-//@ dont-require-annotations: NOTE
-
-struct Foo<'a,'b> {
-    x: &'a isize,
-    y: &'b isize,
-}
-
-impl<'a,'b> Foo<'a,'b> {
-    fn bar(self:
-           Foo<'b,'a>
-    //~^ ERROR mismatched `self` parameter type
-    //~| NOTE expected struct `Foo<'a, 'b>`
-    //~| NOTE found struct `Foo<'b, 'a>`
-    //~| NOTE lifetime mismatch
-    //~| ERROR mismatched `self` parameter type
-    //~| NOTE expected struct `Foo<'a, 'b>`
-    //~| NOTE found struct `Foo<'b, 'a>`
-    //~| NOTE lifetime mismatch
-           ) {}
-}
-
-fn main() {}
diff --git a/tests/ui/explicit/explicit-self-lifetime-mismatch.stderr b/tests/ui/explicit/explicit-self-lifetime-mismatch.stderr
deleted file mode 100644
index a20901e..0000000
--- a/tests/ui/explicit/explicit-self-lifetime-mismatch.stderr
+++ /dev/null
@@ -1,41 +0,0 @@
-error[E0308]: mismatched `self` parameter type
-  --> $DIR/explicit-self-lifetime-mismatch.rs:10:12
-   |
-LL |            Foo<'b,'a>
-   |            ^^^^^^^^^^ lifetime mismatch
-   |
-   = note: expected struct `Foo<'a, 'b>`
-              found struct `Foo<'b, 'a>`
-note: the lifetime `'b` as defined here...
-  --> $DIR/explicit-self-lifetime-mismatch.rs:8:9
-   |
-LL | impl<'a,'b> Foo<'a,'b> {
-   |         ^^
-note: ...does not necessarily outlive the lifetime `'a` as defined here
-  --> $DIR/explicit-self-lifetime-mismatch.rs:8:6
-   |
-LL | impl<'a,'b> Foo<'a,'b> {
-   |      ^^
-
-error[E0308]: mismatched `self` parameter type
-  --> $DIR/explicit-self-lifetime-mismatch.rs:10:12
-   |
-LL |            Foo<'b,'a>
-   |            ^^^^^^^^^^ lifetime mismatch
-   |
-   = note: expected struct `Foo<'a, 'b>`
-              found struct `Foo<'b, 'a>`
-note: the lifetime `'a` as defined here...
-  --> $DIR/explicit-self-lifetime-mismatch.rs:8:6
-   |
-LL | impl<'a,'b> Foo<'a,'b> {
-   |      ^^
-note: ...does not necessarily outlive the lifetime `'b` as defined here
-  --> $DIR/explicit-self-lifetime-mismatch.rs:8:9
-   |
-LL | impl<'a,'b> Foo<'a,'b> {
-   |         ^^
-
-error: aborting due to 2 previous errors
-
-For more information about this error, try `rustc --explain E0308`.
diff --git a/tests/ui/frontmatter/fence-too-many-dashes.rs b/tests/ui/frontmatter/fence-too-many-dashes.rs
new file mode 100644
index 0000000..abb6474
--- /dev/null
+++ b/tests/ui/frontmatter/fence-too-many-dashes.rs
@@ -0,0 +1,11 @@
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+//~? ERROR: too many `-` symbols: frontmatter openings may be delimited by up to 255 `-` symbols
+// ignore-tidy-linelength
+[dependencies]
+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+#![feature(frontmatter)]
+
+// check that we limit fence lengths
+
+fn main() {}
diff --git a/tests/ui/frontmatter/fence-too-many-dashes.stderr b/tests/ui/frontmatter/fence-too-many-dashes.stderr
new file mode 100644
index 0000000..09ed9ff
--- /dev/null
+++ b/tests/ui/frontmatter/fence-too-many-dashes.stderr
@@ -0,0 +1,4 @@
+error: too many `-` symbols: frontmatter openings may be delimited by up to 255 `-` symbols, but found 256
+
+error: aborting due to 1 previous error
+
diff --git a/tests/ui/invalid-module-declaration/auxiliary/foo/bar.rs b/tests/ui/invalid-module-declaration/auxiliary/foo/bar.rs
deleted file mode 100644
index bcfd7dc..0000000
--- a/tests/ui/invalid-module-declaration/auxiliary/foo/bar.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod baz;
diff --git a/tests/ui/invalid-module-declaration/auxiliary/foo/mod.rs b/tests/ui/invalid-module-declaration/auxiliary/foo/mod.rs
deleted file mode 100644
index 46f285c..0000000
--- a/tests/ui/invalid-module-declaration/auxiliary/foo/mod.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod bar;
diff --git a/tests/ui/invalid-module-declaration/invalid-module-declaration.rs b/tests/ui/invalid-module-declaration/invalid-module-declaration.rs
deleted file mode 100644
index 1c6c282..0000000
--- a/tests/ui/invalid-module-declaration/invalid-module-declaration.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-mod auxiliary {
-    mod foo;
-}
-
-fn main() {}
-
-//~? ERROR file not found for module `baz`
diff --git a/tests/ui/invalid-module-declaration/invalid-module-declaration.stderr b/tests/ui/invalid-module-declaration/invalid-module-declaration.stderr
deleted file mode 100644
index a8f6588..0000000
--- a/tests/ui/invalid-module-declaration/invalid-module-declaration.stderr
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0583]: file not found for module `baz`
-  --> $DIR/auxiliary/foo/bar.rs:1:1
-   |
-LL | pub mod baz;
-   | ^^^^^^^^^^^^
-   |
-   = help: to create the module `baz`, create file "$DIR/auxiliary/foo/bar/baz.rs" or "$DIR/auxiliary/foo/bar/baz/mod.rs"
-   = note: if there is a `mod baz` elsewhere in the crate already, import it with `use crate::...` instead
-
-error: aborting due to 1 previous error
-
-For more information about this error, try `rustc --explain E0583`.
diff --git a/tests/ui/invalid-self-argument/bare-fn-start.rs b/tests/ui/invalid-self-argument/bare-fn-start.rs
deleted file mode 100644
index 7c580bc..0000000
--- a/tests/ui/invalid-self-argument/bare-fn-start.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-fn a(&self) { }
-//~^ ERROR `self` parameter is only allowed in associated functions
-//~| NOTE not semantically valid as function parameter
-//~| NOTE associated functions are those in `impl` or `trait` definitions
-
-fn main() { }
diff --git a/tests/ui/invalid-self-argument/bare-fn-start.stderr b/tests/ui/invalid-self-argument/bare-fn-start.stderr
deleted file mode 100644
index bf7160b..0000000
--- a/tests/ui/invalid-self-argument/bare-fn-start.stderr
+++ /dev/null
@@ -1,10 +0,0 @@
-error: `self` parameter is only allowed in associated functions
-  --> $DIR/bare-fn-start.rs:1:6
-   |
-LL | fn a(&self) { }
-   |      ^^^^^ not semantically valid as function parameter
-   |
-   = note: associated functions are those in `impl` or `trait` definitions
-
-error: aborting due to 1 previous error
-
diff --git a/tests/ui/invalid-self-argument/bare-fn.rs b/tests/ui/invalid-self-argument/bare-fn.rs
deleted file mode 100644
index 342bdc3..0000000
--- a/tests/ui/invalid-self-argument/bare-fn.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-fn b(foo: u32, &mut self) { }
-//~^ ERROR unexpected `self` parameter in function
-//~| NOTE must be the first parameter of an associated function
-
-fn main() { }
diff --git a/tests/ui/invalid-self-argument/bare-fn.stderr b/tests/ui/invalid-self-argument/bare-fn.stderr
deleted file mode 100644
index 7abb566..0000000
--- a/tests/ui/invalid-self-argument/bare-fn.stderr
+++ /dev/null
@@ -1,8 +0,0 @@
-error: unexpected `self` parameter in function
-  --> $DIR/bare-fn.rs:1:16
-   |
-LL | fn b(foo: u32, &mut self) { }
-   |                ^^^^^^^^^ must be the first parameter of an associated function
-
-error: aborting due to 1 previous error
-
diff --git a/tests/ui/invalid-self-argument/trait-fn.rs b/tests/ui/invalid-self-argument/trait-fn.rs
deleted file mode 100644
index 5ccea58..0000000
--- a/tests/ui/invalid-self-argument/trait-fn.rs
+++ /dev/null
@@ -1,11 +0,0 @@
-struct Foo {}
-
-impl Foo {
-    fn c(foo: u32, self) {}
-    //~^ ERROR unexpected `self` parameter in function
-    //~| NOTE must be the first parameter of an associated function
-
-    fn good(&mut self, foo: u32) {}
-}
-
-fn main() { }
diff --git a/tests/ui/invalid-self-argument/trait-fn.stderr b/tests/ui/invalid-self-argument/trait-fn.stderr
deleted file mode 100644
index c9d0a33..0000000
--- a/tests/ui/invalid-self-argument/trait-fn.stderr
+++ /dev/null
@@ -1,8 +0,0 @@
-error: unexpected `self` parameter in function
-  --> $DIR/trait-fn.rs:4:20
-   |
-LL |     fn c(foo: u32, self) {}
-   |                    ^^^^ must be the first parameter of an associated function
-
-error: aborting due to 1 previous error
-
diff --git a/tests/ui/issues/issue-17740.rs b/tests/ui/issues/issue-17740.rs
deleted file mode 100644
index 20a7375..0000000
--- a/tests/ui/issues/issue-17740.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-//@ dont-require-annotations: NOTE
-
-struct Foo<'a> {
-    data: &'a[u8],
-}
-
-impl <'a> Foo<'a>{
-    fn bar(self: &mut Foo) {
-    //~^ ERROR mismatched `self` parameter type
-    //~| NOTE expected struct `Foo<'a>`
-    //~| NOTE found struct `Foo<'_>`
-    //~| NOTE lifetime mismatch
-    //~| ERROR mismatched `self` parameter type
-    //~| NOTE expected struct `Foo<'a>`
-    //~| NOTE found struct `Foo<'_>`
-    //~| NOTE lifetime mismatch
-    }
-}
-
-fn main() {}
diff --git a/tests/ui/issues/issue-17740.stderr b/tests/ui/issues/issue-17740.stderr
deleted file mode 100644
index 198d7d5..0000000
--- a/tests/ui/issues/issue-17740.stderr
+++ /dev/null
@@ -1,41 +0,0 @@
-error[E0308]: mismatched `self` parameter type
-  --> $DIR/issue-17740.rs:8:18
-   |
-LL |     fn bar(self: &mut Foo) {
-   |                  ^^^^^^^^ lifetime mismatch
-   |
-   = note: expected struct `Foo<'a>`
-              found struct `Foo<'_>`
-note: the anonymous lifetime defined here...
-  --> $DIR/issue-17740.rs:8:23
-   |
-LL |     fn bar(self: &mut Foo) {
-   |                       ^^^
-note: ...does not necessarily outlive the lifetime `'a` as defined here
-  --> $DIR/issue-17740.rs:7:7
-   |
-LL | impl <'a> Foo<'a>{
-   |       ^^
-
-error[E0308]: mismatched `self` parameter type
-  --> $DIR/issue-17740.rs:8:18
-   |
-LL |     fn bar(self: &mut Foo) {
-   |                  ^^^^^^^^ lifetime mismatch
-   |
-   = note: expected struct `Foo<'a>`
-              found struct `Foo<'_>`
-note: the lifetime `'a` as defined here...
-  --> $DIR/issue-17740.rs:7:7
-   |
-LL | impl <'a> Foo<'a>{
-   |       ^^
-note: ...does not necessarily outlive the anonymous lifetime defined here
-  --> $DIR/issue-17740.rs:8:23
-   |
-LL |     fn bar(self: &mut Foo) {
-   |                       ^^^
-
-error: aborting due to 2 previous errors
-
-For more information about this error, try `rustc --explain E0308`.
diff --git a/tests/ui/lifetimes/explicit-self-lifetime-mismatch.rs b/tests/ui/lifetimes/explicit-self-lifetime-mismatch.rs
new file mode 100644
index 0000000..88b9d86
--- /dev/null
+++ b/tests/ui/lifetimes/explicit-self-lifetime-mismatch.rs
@@ -0,0 +1,41 @@
+//@ dont-require-annotations: NOTE
+//! regression test for <https://github.com/rust-lang/rust/issues/17740>
+
+struct Foo<'a, 'b> {
+    x: &'a isize,
+    y: &'b isize,
+}
+
+impl<'a, 'b> Foo<'a, 'b> {
+    fn bar(
+        self: Foo<'b, 'a>,
+        //~^ ERROR mismatched `self` parameter type
+        //~| NOTE expected struct `Foo<'a, 'b>`
+        //~| NOTE found struct `Foo<'b, 'a>`
+        //~| NOTE lifetime mismatch
+        //~| ERROR mismatched `self` parameter type
+        //~| NOTE expected struct `Foo<'a, 'b>`
+        //~| NOTE found struct `Foo<'b, 'a>`
+        //~| NOTE lifetime mismatch
+    ) {
+    }
+}
+
+struct Bar<'a> {
+    data: &'a [u8],
+}
+
+impl<'a> Bar<'a> {
+    fn bar(self: &mut Bar) {
+        //~^ ERROR mismatched `self` parameter type
+        //~| NOTE expected struct `Bar<'a>`
+        //~| NOTE found struct `Bar<'_>`
+        //~| NOTE lifetime mismatch
+        //~| ERROR mismatched `self` parameter type
+        //~| NOTE expected struct `Bar<'a>`
+        //~| NOTE found struct `Bar<'_>`
+        //~| NOTE lifetime mismatch
+    }
+}
+
+fn main() {}
diff --git a/tests/ui/lifetimes/explicit-self-lifetime-mismatch.stderr b/tests/ui/lifetimes/explicit-self-lifetime-mismatch.stderr
new file mode 100644
index 0000000..ebd6383
--- /dev/null
+++ b/tests/ui/lifetimes/explicit-self-lifetime-mismatch.stderr
@@ -0,0 +1,79 @@
+error[E0308]: mismatched `self` parameter type
+  --> $DIR/explicit-self-lifetime-mismatch.rs:11:15
+   |
+LL |         self: Foo<'b, 'a>,
+   |               ^^^^^^^^^^^ lifetime mismatch
+   |
+   = note: expected struct `Foo<'a, 'b>`
+              found struct `Foo<'b, 'a>`
+note: the lifetime `'b` as defined here...
+  --> $DIR/explicit-self-lifetime-mismatch.rs:9:10
+   |
+LL | impl<'a, 'b> Foo<'a, 'b> {
+   |          ^^
+note: ...does not necessarily outlive the lifetime `'a` as defined here
+  --> $DIR/explicit-self-lifetime-mismatch.rs:9:6
+   |
+LL | impl<'a, 'b> Foo<'a, 'b> {
+   |      ^^
+
+error[E0308]: mismatched `self` parameter type
+  --> $DIR/explicit-self-lifetime-mismatch.rs:11:15
+   |
+LL |         self: Foo<'b, 'a>,
+   |               ^^^^^^^^^^^ lifetime mismatch
+   |
+   = note: expected struct `Foo<'a, 'b>`
+              found struct `Foo<'b, 'a>`
+note: the lifetime `'a` as defined here...
+  --> $DIR/explicit-self-lifetime-mismatch.rs:9:6
+   |
+LL | impl<'a, 'b> Foo<'a, 'b> {
+   |      ^^
+note: ...does not necessarily outlive the lifetime `'b` as defined here
+  --> $DIR/explicit-self-lifetime-mismatch.rs:9:10
+   |
+LL | impl<'a, 'b> Foo<'a, 'b> {
+   |          ^^
+
+error[E0308]: mismatched `self` parameter type
+  --> $DIR/explicit-self-lifetime-mismatch.rs:29:18
+   |
+LL |     fn bar(self: &mut Bar) {
+   |                  ^^^^^^^^ lifetime mismatch
+   |
+   = note: expected struct `Bar<'a>`
+              found struct `Bar<'_>`
+note: the anonymous lifetime defined here...
+  --> $DIR/explicit-self-lifetime-mismatch.rs:29:23
+   |
+LL |     fn bar(self: &mut Bar) {
+   |                       ^^^
+note: ...does not necessarily outlive the lifetime `'a` as defined here
+  --> $DIR/explicit-self-lifetime-mismatch.rs:28:6
+   |
+LL | impl<'a> Bar<'a> {
+   |      ^^
+
+error[E0308]: mismatched `self` parameter type
+  --> $DIR/explicit-self-lifetime-mismatch.rs:29:18
+   |
+LL |     fn bar(self: &mut Bar) {
+   |                  ^^^^^^^^ lifetime mismatch
+   |
+   = note: expected struct `Bar<'a>`
+              found struct `Bar<'_>`
+note: the lifetime `'a` as defined here...
+  --> $DIR/explicit-self-lifetime-mismatch.rs:28:6
+   |
+LL | impl<'a> Bar<'a> {
+   |      ^^
+note: ...does not necessarily outlive the anonymous lifetime defined here
+  --> $DIR/explicit-self-lifetime-mismatch.rs:29:23
+   |
+LL |     fn bar(self: &mut Bar) {
+   |                       ^^^
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/tests/ui/parser/item-kw-case-mismatch.stderr b/tests/ui/parser/item-kw-case-mismatch.stderr
index d2a1eb7..55cbc6b 100644
--- a/tests/ui/parser/item-kw-case-mismatch.stderr
+++ b/tests/ui/parser/item-kw-case-mismatch.stderr
@@ -4,7 +4,7 @@
 LL | Use std::ptr::read;
    | ^^^
    |
-help: write it in the correct case (notice the capitalization)
+help: write it in lowercase (notice the capitalization)
    |
 LL - Use std::ptr::read;
 LL + use std::ptr::read;
@@ -16,7 +16,7 @@
 LL | USE std::ptr::write;
    | ^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - USE std::ptr::write;
 LL + use std::ptr::write;
@@ -28,7 +28,7 @@
 LL | async Fn _a() {}
    |       ^^
    |
-help: write it in the correct case (notice the capitalization)
+help: write it in lowercase (notice the capitalization)
    |
 LL - async Fn _a() {}
 LL + async fn _a() {}
@@ -40,7 +40,7 @@
 LL | Fn _b() {}
    | ^^
    |
-help: write it in the correct case (notice the capitalization)
+help: write it in lowercase (notice the capitalization)
    |
 LL - Fn _b() {}
 LL + fn _b() {}
@@ -52,7 +52,7 @@
 LL | aSYNC fN _c() {}
    | ^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - aSYNC fN _c() {}
 LL + async fN _c() {}
@@ -64,7 +64,7 @@
 LL | aSYNC fN _c() {}
    |       ^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - aSYNC fN _c() {}
 LL + aSYNC fn _c() {}
@@ -76,7 +76,7 @@
 LL | Async fn _d() {}
    | ^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - Async fn _d() {}
 LL + async fn _d() {}
@@ -88,7 +88,7 @@
 LL | CONST UNSAFE FN _e() {}
    | ^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - CONST UNSAFE FN _e() {}
 LL + const UNSAFE FN _e() {}
@@ -100,7 +100,7 @@
 LL | CONST UNSAFE FN _e() {}
    |       ^^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - CONST UNSAFE FN _e() {}
 LL + CONST unsafe FN _e() {}
@@ -112,7 +112,7 @@
 LL | CONST UNSAFE FN _e() {}
    |              ^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - CONST UNSAFE FN _e() {}
 LL + CONST UNSAFE fn _e() {}
@@ -124,7 +124,7 @@
 LL | unSAFE EXTern "C" fn _f() {}
    | ^^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - unSAFE EXTern "C" fn _f() {}
 LL + unsafe EXTern "C" fn _f() {}
@@ -136,7 +136,7 @@
 LL | unSAFE EXTern "C" fn _f() {}
    |        ^^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - unSAFE EXTern "C" fn _f() {}
 LL + unSAFE extern "C" fn _f() {}
@@ -148,7 +148,7 @@
 LL | EXTERN "C" FN _g() {}
    | ^^^^^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - EXTERN "C" FN _g() {}
 LL + extern "C" FN _g() {}
@@ -160,7 +160,7 @@
 LL | EXTERN "C" FN _g() {}
    |            ^^
    |
-help: write it in the correct case
+help: write it in lowercase
    |
 LL - EXTERN "C" FN _g() {}
 LL + EXTERN "C" fn _g() {}
diff --git a/tests/ui/self/invalid-self-argument.rs b/tests/ui/self/invalid-self-argument.rs
new file mode 100644
index 0000000..fef687e
--- /dev/null
+++ b/tests/ui/self/invalid-self-argument.rs
@@ -0,0 +1,22 @@
+//! Regression test for <https://github.com/rust-lang/rust/issues/55972>
+
+fn a(&self) {}
+//~^ ERROR `self` parameter is only allowed in associated functions
+//~| NOTE not semantically valid as function parameter
+//~| NOTE associated functions are those in `impl` or `trait` definitions
+
+fn b(foo: u32, &mut self) {}
+//~^ ERROR unexpected `self` parameter in function
+//~| NOTE must be the first parameter of an associated function
+
+struct Foo {}
+
+impl Foo {
+    fn c(foo: u32, self) {}
+    //~^ ERROR unexpected `self` parameter in function
+    //~| NOTE must be the first parameter of an associated function
+
+    fn good(&mut self, foo: u32) {}
+}
+
+fn main() {}
diff --git a/tests/ui/self/invalid-self-argument.stderr b/tests/ui/self/invalid-self-argument.stderr
new file mode 100644
index 0000000..c92e5b2
--- /dev/null
+++ b/tests/ui/self/invalid-self-argument.stderr
@@ -0,0 +1,22 @@
+error: unexpected `self` parameter in function
+  --> $DIR/invalid-self-argument.rs:8:16
+   |
+LL | fn b(foo: u32, &mut self) {}
+   |                ^^^^^^^^^ must be the first parameter of an associated function
+
+error: unexpected `self` parameter in function
+  --> $DIR/invalid-self-argument.rs:15:20
+   |
+LL |     fn c(foo: u32, self) {}
+   |                    ^^^^ must be the first parameter of an associated function
+
+error: `self` parameter is only allowed in associated functions
+  --> $DIR/invalid-self-argument.rs:3:6
+   |
+LL | fn a(&self) {}
+   |      ^^^^^ not semantically valid as function parameter
+   |
+   = note: associated functions are those in `impl` or `trait` definitions
+
+error: aborting due to 3 previous errors
+
diff --git a/tests/ui/interior-mutability/interior-mutability.rs b/tests/ui/traits/catch-unwind-cell-interior-mut.rs
similarity index 78%
rename from tests/ui/interior-mutability/interior-mutability.rs
rename to tests/ui/traits/catch-unwind-cell-interior-mut.rs
index 7e4fe76..cfc5232 100644
--- a/tests/ui/interior-mutability/interior-mutability.rs
+++ b/tests/ui/traits/catch-unwind-cell-interior-mut.rs
@@ -1,3 +1,4 @@
+//! Related issue: <https://github.com/rust-lang/rust/issues/40313>
 //@ compile-flags: -Zwrite-long-types-to-disk=yes
 use std::cell::Cell;
 use std::panic::catch_unwind;
diff --git a/tests/ui/interior-mutability/interior-mutability.stderr b/tests/ui/traits/catch-unwind-cell-interior-mut.stderr
similarity index 90%
rename from tests/ui/interior-mutability/interior-mutability.stderr
rename to tests/ui/traits/catch-unwind-cell-interior-mut.stderr
index b307d60..6f58c88 100644
--- a/tests/ui/interior-mutability/interior-mutability.stderr
+++ b/tests/ui/traits/catch-unwind-cell-interior-mut.stderr
@@ -1,5 +1,5 @@
 error[E0277]: the type `UnsafeCell<i32>` may contain interior mutability and a reference may not be safely transferable across a catch_unwind boundary
-  --> $DIR/interior-mutability.rs:6:18
+  --> $DIR/catch-unwind-cell-interior-mut.rs:7:18
    |
 LL |     catch_unwind(|| { x.set(23); });
    |     ------------ ^^^^^^^^^^^^^^^^^ `UnsafeCell<i32>` may contain interior mutability and a reference may not be safely transferable across a catch_unwind boundary
@@ -11,7 +11,7 @@
   --> $SRC_DIR/core/src/cell.rs:LL:COL
    = note: required for `&Cell<i32>` to implement `UnwindSafe`
 note: required because it's used within this closure
-  --> $DIR/interior-mutability.rs:6:18
+  --> $DIR/catch-unwind-cell-interior-mut.rs:7:18
    |
 LL |     catch_unwind(|| { x.set(23); });
    |                  ^^
diff --git a/tests/ui/traits/const-traits/auxiliary/staged-api.rs b/tests/ui/traits/const-traits/auxiliary/staged-api.rs
index b2b1e06..65e7586 100644
--- a/tests/ui/traits/const-traits/auxiliary/staged-api.rs
+++ b/tests/ui/traits/const-traits/auxiliary/staged-api.rs
@@ -1,4 +1,3 @@
-//@ compile-flags: -Znext-solver
 #![feature(const_trait_impl)]
 #![feature(staged_api)]
 #![stable(feature = "rust1", since = "1.0.0")]
@@ -19,6 +18,14 @@ impl const MyTrait for Unstable {
     fn func() {}
 }
 
+// Tested in inherent-impl-stability.rs instead to avoid clutter.
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_const_unstable(feature = "unstable", issue = "none")]
+const impl Unstable {
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn inherent_func() {}
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Unstable2;
 
diff --git a/tests/ui/traits/const-traits/inherent-impl-stability.rs b/tests/ui/traits/const-traits/inherent-impl-stability.rs
new file mode 100644
index 0000000..e520e5a
--- /dev/null
+++ b/tests/ui/traits/const-traits/inherent-impl-stability.rs
@@ -0,0 +1,16 @@
+//@ aux-build: staged-api.rs
+extern crate staged_api;
+
+use staged_api::*;
+
+// Const stability has no impact on usage in non-const contexts.
+fn non_const_context() {
+    Unstable::inherent_func();
+}
+
+const fn stable_const_context() {
+    Unstable::inherent_func();
+    //~^ ERROR: `staged_api::Unstable::inherent_func` is not yet stable as a const fn
+}
+
+fn main() {}
diff --git a/tests/ui/traits/const-traits/inherent-impl-stability.stderr b/tests/ui/traits/const-traits/inherent-impl-stability.stderr
new file mode 100644
index 0000000..018065c
--- /dev/null
+++ b/tests/ui/traits/const-traits/inherent-impl-stability.stderr
@@ -0,0 +1,13 @@
+error: `staged_api::Unstable::inherent_func` is not yet stable as a const fn
+  --> $DIR/inherent-impl-stability.rs:12:5
+   |
+LL |     Unstable::inherent_func();
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+help: add `#![feature(unstable)]` to the crate attributes to enable
+   |
+LL + #![feature(unstable)]
+   |
+
+error: aborting due to 1 previous error
+
diff --git a/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-1.rs b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-1.rs
new file mode 100644
index 0000000..76a44db
--- /dev/null
+++ b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-1.rs
@@ -0,0 +1,43 @@
+//@ compile-flags: -Znext-solver
+//@ edition: 2024
+//
+// A regression test for the ICE variant in trait-system-refactor-initiative#245.
+// We encounter regions that have already been popped off when using the parent predicate in
+// cause code. The `cause` in an `Obligation` is ignored by folders/visitors.
+// In this case, `fudge_inference_if_ok` doesn't fudge a region var in the cause code.
+//
+// The old solver doesn't trigger the ICE because regions in the predicate are replaced with
+// placeholders when checking the generator witness. Besides, the old solver doesn't eagerly
+// resolve vars before canonicalizing the predicate in `predicate_must_hold_modulo_regions`.
+
+trait AsyncFn: Send + 'static {
+    type Fut: Future<Output = ()> + Send;
+
+    fn call(&self) -> Self::Fut;
+}
+
+async fn wrap_call<P: AsyncFn + ?Sized>(filter: &P) {
+    filter.call().await;
+}
+
+fn get_boxed_fn() -> Box<DynAsyncFnBoxed> {
+    todo!()
+}
+
+async fn cursed_fut() {
+    wrap_call(get_boxed_fn().as_ref()).await;
+}
+
+fn observe_fut_not_send() {
+    assert_send(cursed_fut());
+    //~^ ERROR: `dyn AsyncFn<Fut = Pin<Box<dyn Future<Output = ()> + Send>>>` cannot be shared between threads safely [E0277]
+}
+
+fn assert_send<T: Send>(t: T) -> T {
+    t
+}
+
+pub type BoxFuture<'a, T> = std::pin::Pin<Box<dyn Future<Output = T> + Send + 'a>>;
+type DynAsyncFnBoxed = dyn AsyncFn<Fut = BoxFuture<'static, ()>>;
+
+fn main() {}
diff --git a/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-1.stderr b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-1.stderr
new file mode 100644
index 0000000..a095186
--- /dev/null
+++ b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-1.stderr
@@ -0,0 +1,35 @@
+error[E0277]: `dyn AsyncFn<Fut = Pin<Box<dyn Future<Output = ()> + Send>>>` cannot be shared between threads safely
+  --> $DIR/leaking-vars-in-cause-code-1.rs:32:17
+   |
+LL |     assert_send(cursed_fut());
+   |     ----------- ^^^^^^^^^^^^ `dyn AsyncFn<Fut = Pin<Box<dyn Future<Output = ()> + Send>>>` cannot be shared between threads safely
+   |     |
+   |     required by a bound introduced by this call
+   |
+   = help: the trait `Sync` is not implemented for `dyn AsyncFn<Fut = Pin<Box<dyn Future<Output = ()> + Send>>>`
+   = note: required for `&dyn AsyncFn<Fut = Pin<Box<dyn Future<Output = ()> + Send>>>` to implement `Send`
+note: required because it's used within this `async` fn body
+  --> $DIR/leaking-vars-in-cause-code-1.rs:19:53
+   |
+LL |   async fn wrap_call<P: AsyncFn + ?Sized>(filter: &P) {
+   |  _____________________________________________________^
+LL | |     filter.call().await;
+LL | | }
+   | |_^
+note: required because it's used within this `async` fn body
+  --> $DIR/leaking-vars-in-cause-code-1.rs:27:23
+   |
+LL |   async fn cursed_fut() {
+   |  _______________________^
+LL | |     wrap_call(get_boxed_fn().as_ref()).await;
+LL | | }
+   | |_^
+note: required by a bound in `assert_send`
+  --> $DIR/leaking-vars-in-cause-code-1.rs:36:19
+   |
+LL | fn assert_send<T: Send>(t: T) -> T {
+   |                   ^^^^ required by this bound in `assert_send`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-2.rs b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-2.rs
new file mode 100644
index 0000000..4dd170c
--- /dev/null
+++ b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-2.rs
@@ -0,0 +1,31 @@
+//@ compile-flags: -Znext-solver
+
+// The `cause` in an `Obligation` is ignored by type folders, so infer vars in cause code are
+// not fudged.
+// See the comments in
+// `leaking-vars-in-cause-code-1.rs` for more details.
+trait Trait<T> {}
+struct A<T>(T);
+struct B<T>(T);
+
+trait IncompleteGuidance {}
+
+impl<T> Trait<()> for A<T>
+where
+    T: IncompleteGuidance,
+{
+}
+
+impl<T, U> Trait<()> for B<T>
+//~^ ERROR: the type parameter `U` is not constrained by the impl trait, self type, or predicates
+where
+    A<T>: Trait<U>,
+{
+}
+
+fn impls_trait<T: Trait<()>>() {}
+
+fn main() {
+    impls_trait::<B<()>>();
+    //~^ ERROR: the trait bound `(): IncompleteGuidance` is not satisfied
+}
diff --git a/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-2.stderr b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-2.stderr
new file mode 100644
index 0000000..9e8194b
--- /dev/null
+++ b/tests/ui/traits/error-reporting/leaking-vars-in-cause-code-2.stderr
@@ -0,0 +1,37 @@
+error[E0207]: the type parameter `U` is not constrained by the impl trait, self type, or predicates
+  --> $DIR/leaking-vars-in-cause-code-2.rs:19:9
+   |
+LL | impl<T, U> Trait<()> for B<T>
+   |         ^ unconstrained type parameter
+
+error[E0277]: the trait bound `(): IncompleteGuidance` is not satisfied
+  --> $DIR/leaking-vars-in-cause-code-2.rs:29:19
+   |
+LL |     impls_trait::<B<()>>();
+   |                   ^^^^^ the trait `IncompleteGuidance` is not implemented for `()`
+   |
+help: this trait has no implementations, consider adding one
+  --> $DIR/leaking-vars-in-cause-code-2.rs:11:1
+   |
+LL | trait IncompleteGuidance {}
+   | ^^^^^^^^^^^^^^^^^^^^^^^^
+note: required for `A<()>` to implement `Trait<()>`
+  --> $DIR/leaking-vars-in-cause-code-2.rs:13:9
+   |
+LL | impl<T> Trait<()> for A<T>
+   |         ^^^^^^^^^     ^^^^
+LL | where
+LL |     T: IncompleteGuidance,
+   |        ------------------ unsatisfied trait bound introduced here
+   = note: 1 redundant requirement hidden
+   = note: required for `B<()>` to implement `Trait<()>`
+note: required by a bound in `impls_trait`
+  --> $DIR/leaking-vars-in-cause-code-2.rs:26:19
+   |
+LL | fn impls_trait<T: Trait<()>>() {}
+   |                   ^^^^^^^^^ required by this bound in `impls_trait`
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0207, E0277.
+For more information about an error, try `rustc --explain E0207`.
diff --git a/tests/ui/use/use-path-segment-kw.rs b/tests/ui/use/use-path-segment-kw.rs
index 137a9e1..680ecd3 100644
--- a/tests/ui/use/use-path-segment-kw.rs
+++ b/tests/ui/use/use-path-segment-kw.rs
@@ -1,4 +1,4 @@
-//@ edition: 2021
+//@ edition: 2018..
 
 macro_rules! macro_dollar_crate {
     () => {