diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index b2fff77e3f162..b392a9623d050 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -540,7 +540,6 @@ impl<'a> TraitDef<'a> { .filter(|a| { a.has_any_name(&[ sym::allow, - sym::expect, sym::warn, sym::deny, sym::forbid, diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/indexed_edges.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/indexed_edges.rs new file mode 100644 index 0000000000000..ffc6e54f3cb06 --- /dev/null +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/indexed_edges.rs @@ -0,0 +1,68 @@ +use rustc_index::IndexVec; +use rustc_type_ir::RegionVid; + +use crate::infer::SubregionOrigin; +use crate::infer::region_constraints::{Constraint, ConstraintKind, RegionConstraintData}; + +/// Selects either out-edges or in-edges for [`IndexedConstraintEdges::adjacent_edges`]. +#[derive(Clone, Copy, Debug)] +pub(super) enum EdgeDirection { + Out, + In, +} + +/// Type alias for the pairs stored in [`RegionConstraintData::constraints`], +/// which we are indexing. +type ConstraintPair<'tcx> = (Constraint<'tcx>, SubregionOrigin<'tcx>); + +/// An index from region variables to their corresponding constraint edges, +/// used on some error paths. +pub(super) struct IndexedConstraintEdges<'data, 'tcx> { + out_edges: IndexVec<RegionVid, Vec<&'data ConstraintPair<'tcx>>>, + in_edges: IndexVec<RegionVid, Vec<&'data ConstraintPair<'tcx>>>, +} + +impl<'data, 'tcx> IndexedConstraintEdges<'data, 'tcx> { + pub(super) fn build_index(num_vars: usize, data: &'data RegionConstraintData<'tcx>) -> Self { + let mut out_edges = IndexVec::from_fn_n(|_| vec![], num_vars); + let mut in_edges = IndexVec::from_fn_n(|_| vec![], num_vars); + + for pair @ (c, _) in &data.constraints { + // Only push a var out-edge for `VarSub...` constraints.
+ match c.kind { + ConstraintKind::VarSubVar | ConstraintKind::VarSubReg => { + out_edges[c.sub.as_var()].push(pair) + } + ConstraintKind::RegSubVar | ConstraintKind::RegSubReg => {} + } + } + + // Index in-edges in reverse order, to match what current tests expect. + // (It's unclear whether this is important or not.) + for pair @ (c, _) in data.constraints.iter().rev() { + // Only push a var in-edge for `...SubVar` constraints. + match c.kind { + ConstraintKind::VarSubVar | ConstraintKind::RegSubVar => { + in_edges[c.sup.as_var()].push(pair) + } + ConstraintKind::VarSubReg | ConstraintKind::RegSubReg => {} + } + } + + IndexedConstraintEdges { out_edges, in_edges } + } + + /// Returns either the out-edges or in-edges of the specified region var, + /// as selected by `dir`. + pub(super) fn adjacent_edges( + &self, + region_vid: RegionVid, + dir: EdgeDirection, + ) -> &[&'data ConstraintPair<'tcx>] { + let edges = match dir { + EdgeDirection::Out => &self.out_edges, + EdgeDirection::In => &self.in_edges, + }; + &edges[region_vid] + } +} diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index 5134b7b7ca8f1..e99dcd1ef15cb 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -3,9 +3,6 @@ use std::fmt; use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::graph::linked_graph::{ - Direction, INCOMING, LinkedGraph, NodeIndex, OUTGOING, -}; use rustc_data_structures::intern::Interned; use rustc_data_structures::unord::UnordSet; use rustc_index::{IndexSlice, IndexVec}; @@ -18,11 +15,14 @@ use rustc_span::Span; use tracing::{debug, instrument}; use super::outlives::test_type_match; +use crate::infer::lexical_region_resolve::indexed_edges::{EdgeDirection, IndexedConstraintEdges}; use crate::infer::region_constraints::{ - Constraint, ConstraintKind, GenericKind, RegionConstraintData, 
VarInfos, VerifyBound, + ConstraintKind, GenericKind, RegionConstraintData, VarInfos, VerifyBound, }; use crate::infer::{RegionRelations, RegionVariableOrigin, SubregionOrigin}; +mod indexed_edges; + /// This function performs lexical region resolution given a complete /// set of constraints and variable origins. It performs a fixed-point /// iteration to find region values which satisfy all constraints, @@ -118,8 +118,6 @@ struct RegionAndOrigin<'tcx> { origin: SubregionOrigin<'tcx>, } -type RegionGraph<'tcx> = LinkedGraph<(), Constraint<'tcx>>; - struct LexicalResolver<'cx, 'tcx> { region_rels: &'cx RegionRelations<'cx, 'tcx>, var_infos: VarInfos<'tcx>, @@ -626,9 +624,8 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // overlapping locations. let mut dup_vec = IndexVec::from_elem_n(None, self.num_vars()); - // Only construct the graph when necessary, because it's moderately - // expensive. - let mut graph = None; + // Only construct the edge index when necessary, because it's moderately expensive. + let mut edges: Option<IndexedConstraintEdges<'_, 'tcx>> = None; for (node_vid, value) in var_data.values.iter_enumerated() { match *value { @@ -662,56 +659,18 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // influence the constraints on this value for // richer diagnostics in `static_impl_trait`.
- let dummy_source = graph.add_node(()); - let dummy_sink = graph.add_node(()); - - for (c, _) in &self.data.constraints { - match c.kind { - ConstraintKind::VarSubVar => { - let sub_vid = c.sub.as_var(); - let sup_vid = c.sup.as_var(); - graph.add_edge(NodeIndex(sub_vid.index()), NodeIndex(sup_vid.index()), *c); - } - ConstraintKind::RegSubVar => { - graph.add_edge(dummy_source, NodeIndex(c.sup.as_var().index()), *c); - } - ConstraintKind::VarSubReg => { - graph.add_edge(NodeIndex(c.sub.as_var().index()), dummy_sink, *c); - } - ConstraintKind::RegSubReg => { - // this would be an edge from `dummy_source` to - // `dummy_sink`; just ignore it. + let e = edges.get_or_insert_with(|| { + IndexedConstraintEdges::build_index(self.num_vars(), &self.data) + }); + self.collect_error_for_expanding_node(e, &mut dup_vec, node_vid, errors); } } } - - graph } fn collect_error_for_expanding_node( &self, - graph: &RegionGraph<'tcx>, + edges: &IndexedConstraintEdges<'_, 'tcx>, dup_vec: &mut IndexSlice<RegionVid, Option<RegionVid>>, node_idx: RegionVid, errors: &mut Vec<RegionResolutionError<'tcx>>, @@ -719,9 +678,9 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // Errors in expanding nodes result from a lower-bound that is // not contained by an upper-bound. let (mut lower_bounds, lower_vid_bounds, lower_dup) = - self.collect_bounding_regions(graph, node_idx, INCOMING, Some(dup_vec)); + self.collect_bounding_regions(edges, node_idx, EdgeDirection::In, Some(dup_vec)); let (mut upper_bounds, _, upper_dup) = - self.collect_bounding_regions(graph, node_idx, OUTGOING, Some(dup_vec)); + self.collect_bounding_regions(edges, node_idx, EdgeDirection::Out, Some(dup_vec)); if lower_dup || upper_dup { return; @@ -829,9 +788,9 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { /// those returned by a previous call for another region.
fn collect_bounding_regions( &self, - graph: &RegionGraph<'tcx>, + edges: &IndexedConstraintEdges<'_, 'tcx>, orig_node_idx: RegionVid, - dir: Direction, + dir: EdgeDirection, mut dup_vec: Option<&mut IndexSlice<RegionVid, Option<RegionVid>>>, ) -> (Vec<RegionAndOrigin<'tcx>>, FxHashSet<RegionVid>, bool) { struct WalkState<'tcx> { @@ -850,7 +809,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // to start off the process, walk the source node in the // direction specified - process_edges(&self.data, &mut state, graph, orig_node_idx, dir); + process_edges(&mut state, edges, orig_node_idx, dir); while let Some(node_idx) = state.stack.pop() { // check whether we've visited this node on some previous walk @@ -867,30 +826,25 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { ); } - process_edges(&self.data, &mut state, graph, node_idx, dir); + process_edges(&mut state, edges, node_idx, dir); } let WalkState { result, dup_found, set, .. } = state; return (result, set, dup_found); fn process_edges<'tcx>( - this: &RegionConstraintData<'tcx>, state: &mut WalkState<'tcx>, - graph: &RegionGraph<'tcx>, + edges: &IndexedConstraintEdges<'_, 'tcx>, source_vid: RegionVid, - dir: Direction, + dir: EdgeDirection, ) { debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir); - let source_node_index = NodeIndex(source_vid.index()); - for (_, edge) in graph.adjacent_edges(source_node_index, dir) { - let get_origin = - || this.constraints.iter().find(|(c, _)| *c == edge.data).unwrap().1.clone(); - - match edge.data.kind { + for (c, origin) in edges.adjacent_edges(source_vid, dir) { + match c.kind { ConstraintKind::VarSubVar => { - let from_vid = edge.data.sub.as_var(); - let to_vid = edge.data.sup.as_var(); + let from_vid = c.sub.as_var(); + let to_vid = c.sup.as_var(); + let opp_vid = if from_vid == source_vid { to_vid } else { from_vid }; if state.set.insert(opp_vid) { state.stack.push(opp_vid); @@ -898,13 +852,13 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } ConstraintKind::RegSubVar => { - let origin = get_origin(); -
state.result.push(RegionAndOrigin { region: edge.data.sub, origin }); + let origin = origin.clone(); + state.result.push(RegionAndOrigin { region: c.sub, origin }); } ConstraintKind::VarSubReg => { - let origin = get_origin(); - state.result.push(RegionAndOrigin { region: edge.data.sup, origin }); + let origin = origin.clone(); + state.result.push(RegionAndOrigin { region: c.sup, origin }); } ConstraintKind::RegSubReg => panic!( diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index f63351ebfd809..cec41524325e0 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -289,7 +289,6 @@ struct RcInner { } /// Calculate layout for `RcInner` using the inner value's layout -#[inline] fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout { // Calculate layout using the given value layout. // Previously, layout was calculated on the expression @@ -2519,25 +2518,15 @@ impl Default for Rc { /// ``` #[inline] fn default() -> Self { - // First create an uninitialized allocation before creating an instance - // of `T`. This avoids having `T` on the stack and avoids the need to - // codegen a call to the destructor for `T` leading to generally better - // codegen. See #131460 for some more details. - let mut rc = Rc::new_uninit(); - - // SAFETY: this is a freshly allocated `Rc` so it's guaranteed there are - // no other strong or weak pointers other than `rc` itself. unsafe { - let raw = Rc::get_mut_unchecked(&mut rc); - - // Note that `ptr::write` here is used specifically instead of - // `MaybeUninit::write` to avoid creating an extra stack copy of `T` - // in debug mode. See #136043 for more context. - ptr::write(raw.as_mut_ptr(), T::default()); + Self::from_inner( + Box::leak(Box::write( + Box::new_uninit(), + RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() }, + )) + .into(), + ) } - - // SAFETY: this allocation was just initialized above. 
- unsafe { rc.assume_init() } } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index d097588f8e633..dc82357dd146b 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -392,7 +392,6 @@ struct ArcInner { } /// Calculate layout for `ArcInner` using the inner value's layout -#[inline] fn arcinner_layout_for_value_layout(layout: Layout) -> Layout { // Calculate layout using the given value layout. // Previously, layout was calculated on the expression @@ -3725,25 +3724,19 @@ impl Default for Arc { /// assert_eq!(*x, 0); /// ``` fn default() -> Arc { - // First create an uninitialized allocation before creating an instance - // of `T`. This avoids having `T` on the stack and avoids the need to - // codegen a call to the destructor for `T` leading to generally better - // codegen. See #131460 for some more details. - let mut arc = Arc::new_uninit(); - - // SAFETY: this is a freshly allocated `Arc` so it's guaranteed there - // are no other strong or weak pointers other than `arc` itself. unsafe { - let raw = Arc::get_mut_unchecked(&mut arc); - - // Note that `ptr::write` here is used specifically instead of - // `MaybeUninit::write` to avoid creating an extra stack copy of `T` - // in debug mode. See #136043 for more context. - ptr::write(raw.as_mut_ptr(), T::default()); + Self::from_inner( + Box::leak(Box::write( + Box::new_uninit(), + ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + data: T::default(), + }, + )) + .into(), + ) } - - // SAFETY: this allocation was just initialized above. 
- unsafe { arc.assume_init() } } } diff --git a/library/panic_unwind/src/lib.rs b/library/panic_unwind/src/lib.rs index 5372c44cedf75..e89d5e60df62a 100644 --- a/library/panic_unwind/src/lib.rs +++ b/library/panic_unwind/src/lib.rs @@ -14,6 +14,7 @@ #![no_std] #![unstable(feature = "panic_unwind", issue = "32837")] #![doc(issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] +#![cfg_attr(all(target_os = "emscripten", not(emscripten_wasm_eh)), lang_items)] #![feature(cfg_emscripten_wasm_eh)] #![feature(core_intrinsics)] #![feature(panic_unwind)] diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 2070a24f60c81..ce70478207a85 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -386,33 +386,32 @@ fn generate_macro_def_id_path( } else { ItemType::Macro }; - let mut path = clean::inline::get_item_path(tcx, def_id, item_type); - if path.len() < 2 { - // The minimum we can have is the crate name followed by the macro name. If shorter, then - // it means that `relative` was empty, which is an error. - debug!("macro path cannot be empty!"); + let path = clean::inline::get_item_path(tcx, def_id, item_type); + // The minimum we can have is the crate name followed by the macro name. If shorter, then + // it means that `relative` was empty, which is an error. + let [module_path @ .., last] = path.as_slice() else { + debug!("macro path is empty!"); + return Err(HrefError::NotInExternalCache); + }; + if module_path.is_empty() { + debug!("macro path too short: missing crate prefix (got 1 element, need at least 2)"); return Err(HrefError::NotInExternalCache); - } - - // FIXME: Try to use `iter().chain().once()` instead. 
- let mut prev = None; - if let Some(last) = path.pop() { - path.push(Symbol::intern(&format!("{}.{last}.html", item_type.as_str()))); - prev = Some(last); } let url = match cache.extern_locations[&def_id.krate] { ExternalLocation::Remote { ref url, is_absolute } => { let mut prefix = remote_url_prefix(url, is_absolute, cx.current.len()); - prefix.extend(path.iter().copied()); + prefix.extend(module_path.iter().copied()); + prefix.push_fmt(format_args!("{}.{last}.html", item_type.as_str())); prefix.finish() } ExternalLocation::Local => { // `root_path` always end with a `/`. format!( - "{root_path}{path}", + "{root_path}{path}/{item_type}.{last}.html", root_path = root_path.unwrap_or(""), - path = fmt::from_fn(|f| path.iter().joined("/", f)) + path = fmt::from_fn(|f| module_path.iter().joined("/", f)), + item_type = item_type.as_str(), ) } ExternalLocation::Unknown => { @@ -420,10 +419,6 @@ fn generate_macro_def_id_path( return Err(HrefError::NotInExternalCache); } }; - if let Some(prev) = prev { - path.pop(); - path.push(prev); - } Ok(HrefInfo { url, kind: item_type, rust_path: path }) } diff --git a/tests/codegen-llvm/issues/issue-111603.rs b/tests/codegen-llvm/issues/issue-111603.rs index 91eb836478eb1..2ba5a3f876aed 100644 --- a/tests/codegen-llvm/issues/issue-111603.rs +++ b/tests/codegen-llvm/issues/issue-111603.rs @@ -10,9 +10,9 @@ use std::sync::Arc; pub fn new_from_array(x: u64) -> Arc<[u64]> { // Ensure that we only generate one alloca for the array. 
- // CHECK: %[[A:.+]] = alloca + // CHECK: alloca // CHECK-SAME: [8000 x i8] - // CHECK-NOT: %[[B:.+]] = alloca + // CHECK-NOT: alloca let array = [x; 1000]; Arc::new(array) } @@ -20,9 +20,8 @@ pub fn new_from_array(x: u64) -> Arc<[u64]> { // CHECK-LABEL: @new_uninit #[no_mangle] pub fn new_uninit(x: u64) -> Arc<[u64; 1000]> { - // CHECK: %[[A:.+]] = alloca - // CHECK-SAME: [8000 x i8] - // CHECK-NOT: %[[B:.+]] = alloca + // CHECK: call alloc::sync::arcinner_layout_for_value_layout + // CHECK-NOT: call alloc::sync::arcinner_layout_for_value_layout let mut arc = Arc::new_uninit(); unsafe { Arc::get_mut_unchecked(&mut arc) }.write([x; 1000]); unsafe { arc.assume_init() } @@ -31,7 +30,8 @@ pub fn new_uninit(x: u64) -> Arc<[u64; 1000]> { // CHECK-LABEL: @new_uninit_slice #[no_mangle] pub fn new_uninit_slice(x: u64) -> Arc<[u64]> { - // CHECK-NOT: %[[B:.+]] = alloca + // CHECK: call alloc::sync::arcinner_layout_for_value_layout + // CHECK-NOT: call alloc::sync::arcinner_layout_for_value_layout let mut arc = Arc::new_uninit_slice(1000); for elem in unsafe { Arc::get_mut_unchecked(&mut arc) } { elem.write(x); diff --git a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.rs b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.rs index 904366e6532e5..b5f06912a97fc 100644 --- a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.rs +++ b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.rs @@ -1,3 +1,5 @@ +// FIXME: Bring back duplication of the `#[expect]` attribute when deriving. +// // Make sure we produce the unfulfilled expectation lint if neither the struct or the // derived code fulfilled it. 
@@ -5,7 +7,7 @@ #[expect(unexpected_cfgs)] //~^ WARN this lint expectation is unfulfilled -//~^^ WARN this lint expectation is unfulfilled +//FIXME ~^^ WARN this lint expectation is unfulfilled #[derive(Debug)] pub struct MyStruct { pub t_ref: i64, diff --git a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.stderr b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.stderr index 6478ec435db07..e75483df3f78a 100644 --- a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.stderr +++ b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-3.stderr @@ -1,18 +1,10 @@ warning: this lint expectation is unfulfilled - --> $DIR/derive-expect-issue-150553-3.rs:6:10 + --> $DIR/derive-expect-issue-150553-3.rs:8:10 | LL | #[expect(unexpected_cfgs)] | ^^^^^^^^^^^^^^^ | = note: `#[warn(unfulfilled_lint_expectations)]` on by default -warning: this lint expectation is unfulfilled - --> $DIR/derive-expect-issue-150553-3.rs:6:10 - | -LL | #[expect(unexpected_cfgs)] - | ^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 2 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-4.rs b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-4.rs new file mode 100644 index 0000000000000..fb0d508186f8b --- /dev/null +++ b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553-4.rs @@ -0,0 +1,15 @@ +// This test makes sure that expanded items with derives don't interfere with lint expectations. +// +// See <https://github.com/rust-lang/rust/issues/150553> for some context.
+ +//@ check-pass + +#[derive(Clone, Debug)] +#[expect(unused)] +pub struct LoggingArgs { + #[cfg(false)] + x: i32, + y: i32, +} + +fn main() {} diff --git a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.rs b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.rs index 1752835c8bb81..06ec71bb6c84d 100644 --- a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.rs +++ b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.rs @@ -1,9 +1,11 @@ +// FIXME: Bring back duplication of the `#[expect]` attribute when deriving. +// // Make sure we properly copy the `#[expect]` attr to the derived code and that no // unfulfilled expectations are trigerred. // // See for rational. -//@ check-pass +//@ check-fail #![deny(redundant_lifetimes)] @@ -12,6 +14,7 @@ use std::fmt::Debug; #[derive(Debug)] #[expect(redundant_lifetimes)] pub struct RefWrapper<'a, T> +//~^ ERROR redundant_lifetimes where 'a: 'static, T: Debug, diff --git a/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.stderr b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.stderr new file mode 100644 index 0000000000000..c4047329d5c1f --- /dev/null +++ b/tests/ui/lint/rfc-2383-lint-reason/derive-expect-issue-150553.stderr @@ -0,0 +1,15 @@ +error: unnecessary lifetime parameter `'a` + --> $DIR/derive-expect-issue-150553.rs:16:23 + | +LL | pub struct RefWrapper<'a, T> + | ^^ + | + = note: you can use the `'static` lifetime directly, in place of `'a` +note: the lint level is defined here + --> $DIR/derive-expect-issue-150553.rs:10:9 + | +LL | #![deny(redundant_lifetimes)] + | ^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 1 previous error +