diff --git a/library/alloc/src/raw_vec/mod.rs b/library/alloc/src/raw_vec/mod.rs index 1e76710d35364..6b07918632377 100644 --- a/library/alloc/src/raw_vec/mod.rs +++ b/library/alloc/src/raw_vec/mod.rs @@ -71,18 +71,21 @@ unsafe fn new_cap(cap: usize) -> Cap { /// `Box<[T]>`, since `capacity()` won't yield the length. #[allow(missing_debug_implementations)] pub(crate) struct RawVec { - inner: RawVecInner, + inner: RawVecInner, + alloc: A, _marker: PhantomData, } -/// Like a `RawVec`, but only generic over the allocator, not the type. +/// Like a `RawVec`, but not generic over the allocator. /// -/// As such, all the methods need the layout passed-in as a parameter. +/// As such, all the methods need the layout passed-in as a parameter, +/// and methods that need an allocator receive it as `&dyn Allocator`. /// /// Having this separation reduces the amount of code we need to monomorphize, /// as most operations don't need the actual type, just its layout. #[allow(missing_debug_implementations)] -struct RawVecInner { +#[derive(Clone, Copy)] +struct RawVecInner { ptr: Unique, /// Never used for ZSTs; it's `capacity()`'s responsibility to return usize::MAX in that case. /// @@ -90,7 +93,6 @@ struct RawVecInner { /// /// `cap` must be in the `0..=isize::MAX` range. cap: Cap, - alloc: A, } impl RawVec { @@ -123,7 +125,7 @@ impl RawVec { #[must_use] #[inline] pub(crate) fn with_capacity(capacity: usize) -> Self { - Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData } + Self::with_capacity_in(capacity, Global) } /// Like `with_capacity`, but guarantees the buffer is zeroed. 
@@ -131,22 +133,7 @@ impl RawVec { #[must_use] #[inline] pub(crate) fn with_capacity_zeroed(capacity: usize) -> Self { - Self { - inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT), - _marker: PhantomData, - } - } -} - -impl RawVecInner { - #[cfg(not(any(no_global_oom_handling, test)))] - #[must_use] - #[inline] - fn with_capacity(capacity: usize, elem_layout: Layout) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) { - Ok(res) => res, - Err(err) => handle_error(err), - } + Self::with_capacity_zeroed_in(capacity, Global) } } @@ -174,22 +161,52 @@ const impl RawVec { #[inline] pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self { Self { - inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT), + inner: RawVecInner::with_capacity_in(capacity, &alloc, T::LAYOUT), + alloc, _marker: PhantomData, } } +} + +struct CaptureLocally<'a, T> { + value: ManuallyDrop, + old: &'a mut T, +} + +impl<'a, T> Drop for CaptureLocally<'a, T> { + fn drop(&mut self) { + // SAFETY: We have sole ownership of `self.old` during the lifetime of + // this struct, so writing back to it is safe. + unsafe { + ptr::write(self.old, ManuallyDrop::take(&mut self.value)); + } + } +} + +impl<'a, T> CaptureLocally<'a, T> { + fn new(old: &'a mut T) -> Self { + // SAFETY: We are taking ownership of the value at `old`, given that we + // store the mut reference, we have sole ownership of it for the duration of + // this struct's lifetime. The Drop impl will write it back. + Self { value: ManuallyDrop::new(unsafe { ptr::read(old) }), old } + } +} +impl RawVec { /// A specialized version of `self.reserve(len, 1)` which requires the /// caller to ensure `len == self.capacity()`. 
#[cfg(not(no_global_oom_handling))] - #[inline(never)] + #[inline(always)] pub(crate) fn grow_one(&mut self) { + // Copy allocator to a temporary to prevent &self from escaping compiler analysis + // through the &dyn Allocator parameter, allowing LLVM to keep + // the Vec fields in registers. + let local_alloc = CaptureLocally::new(&mut self.alloc); + // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout - unsafe { self.inner.grow_one(T::LAYOUT) } + self.inner = self.inner.grow_one(&local_alloc, T::LAYOUT); } -} -impl RawVec { #[cfg(not(no_global_oom_handling))] pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::()); @@ -199,15 +216,15 @@ impl RawVec { pub(crate) const fn new_in(alloc: A) -> Self { // Check assumption made in `current_memory` const { assert!(T::LAYOUT.size() % T::LAYOUT.align() == 0) }; - Self { inner: RawVecInner::new_in(alloc, Alignment::of::()), _marker: PhantomData } + Self { inner: RawVecInner::new(Alignment::of::()), alloc, _marker: PhantomData } } /// Like `try_with_capacity`, but parameterized over the choice of /// allocator for the returned `RawVec`. 
#[inline] pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { - match RawVecInner::try_with_capacity_in(capacity, alloc, T::LAYOUT) { - Ok(inner) => Ok(Self { inner, _marker: PhantomData }), + match RawVecInner::try_allocate_in(capacity, AllocInit::Uninitialized, &alloc, T::LAYOUT) { + Ok(inner) => Ok(Self { inner, alloc, _marker: PhantomData }), Err(e) => Err(e), } } @@ -217,9 +234,9 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] pub(crate) fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { - Self { - inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT), - _marker: PhantomData, + match RawVecInner::try_allocate_in(capacity, AllocInit::Zeroed, &alloc, T::LAYOUT) { + Ok(inner) => Self { inner, alloc, _marker: PhantomData }, + Err(err) => handle_error(err), } } @@ -245,7 +262,7 @@ impl RawVec { let me = ManuallyDrop::new(self); unsafe { let slice = ptr::slice_from_raw_parts_mut(me.ptr() as *mut MaybeUninit, len); - Box::from_raw_in(slice, ptr::read(&me.inner.alloc)) + Box::from_raw_in(slice, ptr::read(&me.alloc)) } } @@ -265,10 +282,7 @@ impl RawVec { unsafe { let ptr = ptr.cast(); let capacity = new_cap::(capacity); - Self { - inner: RawVecInner::from_raw_parts_in(ptr, capacity, alloc), - _marker: PhantomData, - } + Self { inner: RawVecInner::from_raw_parts(ptr, capacity), alloc, _marker: PhantomData } } } @@ -283,7 +297,7 @@ impl RawVec { unsafe { let ptr = ptr.cast(); let capacity = new_cap::(capacity); - Self { inner: RawVecInner::from_nonnull_in(ptr, capacity, alloc), _marker: PhantomData } + Self { inner: RawVecInner::from_nonnull(ptr, capacity), alloc, _marker: PhantomData } } } @@ -311,7 +325,7 @@ impl RawVec { /// Returns a shared reference to the allocator backing this `RawVec`. 
#[inline] pub(crate) fn allocator(&self) -> &A { - self.inner.allocator() + &self.alloc } /// Ensures that the buffer contains at least enough space to hold `len + @@ -337,7 +351,7 @@ impl RawVec { #[inline] pub(crate) fn reserve(&mut self, len: usize, additional: usize) { // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout - unsafe { self.inner.reserve(len, additional, T::LAYOUT) } + unsafe { self.inner.reserve(len, additional, T::LAYOUT, &self.alloc) } } /// The same as `reserve`, but returns on errors instead of panicking or aborting. @@ -347,7 +361,7 @@ impl RawVec { additional: usize, ) -> Result<(), TryReserveError> { // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout - unsafe { self.inner.try_reserve(len, additional, T::LAYOUT) } + unsafe { self.inner.try_reserve(len, additional, T::LAYOUT, &self.alloc) } } /// Ensures that the buffer contains at least enough space to hold `len + @@ -370,7 +384,7 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) { // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout - unsafe { self.inner.reserve_exact(len, additional, T::LAYOUT) } + unsafe { self.inner.reserve_exact(len, additional, T::LAYOUT, &self.alloc) } } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. @@ -380,7 +394,7 @@ impl RawVec { additional: usize, ) -> Result<(), TryReserveError> { // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout - unsafe { self.inner.try_reserve_exact(len, additional, T::LAYOUT) } + unsafe { self.inner.try_reserve_exact(len, additional, T::LAYOUT, &self.alloc) } } /// Shrinks the buffer down to the specified capacity. 
If the given amount @@ -396,41 +410,46 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] pub(crate) fn shrink_to_fit(&mut self, cap: usize) { - // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout - unsafe { self.inner.shrink_to_fit(cap, T::LAYOUT) } + if let Err(err) = unsafe { self.inner.shrink(cap, T::LAYOUT, &self.alloc) } { + handle_error(err); + } } } unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. + #[inline(always)] fn drop(&mut self) { - // SAFETY: We are in a Drop impl, self.inner will not be used again. - unsafe { self.inner.deallocate(T::LAYOUT) } + // SAFETY: We are in a Drop impl, self.inner and self.alloc will not be used again. + // Copy to temporaries to prevent &self from escaping. + let inner = self.inner; + unsafe { inner.deallocate(T::LAYOUT, &self.alloc) } } } #[rustc_const_unstable(feature = "const_heap", issue = "79597")] #[rustfmt::skip] // FIXME(fee1-dead): temporary measure before rustfmt is bumped -const impl RawVecInner { +const impl RawVecInner { #[cfg(not(no_global_oom_handling))] #[inline] - fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) { - Ok(this) => { + pub(crate) fn with_capacity_in(capacity: usize, alloc: &A, elem_layout: Layout) -> Self { + match RawVecInner::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) { + Ok(inner) => { unsafe { // Make it more obvious that a subsequent Vec::reserve(capacity) will not allocate. 
- hint::assert_unchecked(!this.needs_to_grow(0, capacity, elem_layout)); + hint::assert_unchecked(!inner.needs_to_grow(0, capacity, elem_layout)); } - this + inner } Err(err) => handle_error(err), } } - fn try_allocate_in( + #[inline] + fn try_allocate_in( capacity: usize, init: AllocInit, - alloc: A, + alloc: &A, elem_layout: Layout, ) -> Result { // We avoid `unwrap_or_else` here because it bloats the amount of @@ -442,7 +461,7 @@ const impl RawVecInner { // Don't allocate here because `Drop` will not deallocate when `capacity` is 0. if layout.size() == 0 { - return Ok(Self::new_in(alloc, elem_layout.alignment())); + return Ok(Self::new(elem_layout.alignment())); } let result = match init { @@ -461,21 +480,31 @@ const impl RawVecInner { Ok(Self { ptr: Unique::from(ptr.cast()), cap: unsafe { Cap::new_unchecked(capacity) }, - alloc, }) } +} + +impl RawVecInner { + #[inline] + const fn new(align: Alignment) -> Self { + let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero())); + // `cap: 0` means "unallocated". zero-sized types are ignored. + Self { ptr, cap: ZERO_CAP } + } /// # Safety /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to /// initially construct `self` /// - `elem_layout`'s size must be a multiple of its alignment #[cfg(not(no_global_oom_handling))] - #[inline] - unsafe fn grow_one(&mut self, elem_layout: Layout) { + #[inline(never)] + fn grow_one(mut self, alloc: &dyn Allocator, elem_layout: Layout) -> Self { // SAFETY: Precondition passed to caller - if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } { + if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout, alloc) } + { handle_error(err); } + self } /// # Safety @@ -488,6 +517,7 @@ const impl RawVecInner { len: usize, additional: usize, elem_layout: Layout, + alloc: &dyn Allocator, ) -> Result<(), TryReserveError> { // This is ensured by the calling contexts. 
debug_assert!(additional > 0); @@ -509,7 +539,7 @@ const impl RawVecInner { // SAFETY: // - cap >= len + additional // - other preconditions passed to caller - let ptr = unsafe { self.finish_grow(cap, elem_layout)? }; + let ptr = unsafe { self.finish_grow(cap, elem_layout, alloc)? }; // SAFETY: `finish_grow` would have failed if `cap > isize::MAX` unsafe { self.set_ptr_and_cap(ptr, cap) }; @@ -528,6 +558,7 @@ const impl RawVecInner { &self, cap: usize, elem_layout: Layout, + alloc: &dyn Allocator, ) -> Result, TryReserveError> { let new_layout = layout_array(cap, elem_layout)?; @@ -537,10 +568,10 @@ const impl RawVecInner { unsafe { // The allocator checks for alignment equality hint::assert_unchecked(old_layout.align() == new_layout.align()); - self.alloc.grow(ptr, old_layout, new_layout) + alloc.grow(ptr, old_layout, new_layout) } } else { - self.alloc.allocate(new_layout) + alloc.allocate(new_layout) }; // FIXME(const-hack): switch back to `map_err` @@ -549,42 +580,15 @@ const impl RawVecInner { Err(_) => Err(AllocError { layout: new_layout, non_exhaustive: () }.into()), } } -} - -impl RawVecInner { - #[inline] - const fn new_in(alloc: A, align: Alignment) -> Self { - let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero())); - // `cap: 0` means "unallocated". zero-sized types are ignored. 
- Self { ptr, cap: ZERO_CAP, alloc } - } - - #[inline] - fn try_with_capacity_in( - capacity: usize, - alloc: A, - elem_layout: Layout, - ) -> Result { - Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) - } - - #[cfg(not(no_global_oom_handling))] - #[inline] - fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) { - Ok(res) => res, - Err(err) => handle_error(err), - } - } #[inline] - unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self { - Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc } + unsafe fn from_raw_parts(ptr: *mut u8, cap: Cap) -> Self { + Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap } } #[inline] - unsafe fn from_nonnull_in(ptr: NonNull, cap: Cap, alloc: A) -> Self { - Self { ptr: Unique::from(ptr), cap, alloc } + unsafe fn from_nonnull(ptr: NonNull, cap: Cap) -> Self { + Self { ptr: Unique::from(ptr), cap } } #[inline] @@ -602,11 +606,6 @@ impl RawVecInner { if elem_size == 0 { usize::MAX } else { self.cap.as_inner() } } - #[inline] - fn allocator(&self) -> &A { - &self.alloc - } - /// # Safety /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to /// initially construct `self` @@ -635,27 +634,34 @@ impl RawVecInner { /// - `elem_layout`'s size must be a multiple of its alignment #[cfg(not(no_global_oom_handling))] #[inline] - unsafe fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) { + unsafe fn reserve( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + alloc: &dyn Allocator, + ) { // Callers expect this function to be very cheap when there is already sufficient capacity. 
// Therefore, we move all the resizing and error-handling logic from grow_amortized and // handle_reserve behind a call, while making sure that this function is likely to be // inlined as just a comparison and a call if the comparison fails. #[cold] - unsafe fn do_reserve_and_handle( - slf: &mut RawVecInner, + unsafe fn do_reserve_and_handle( + slf: &mut RawVecInner, len: usize, additional: usize, elem_layout: Layout, + alloc: &dyn Allocator, ) { // SAFETY: Precondition passed to caller - if let Err(err) = unsafe { slf.grow_amortized(len, additional, elem_layout) } { + if let Err(err) = unsafe { slf.grow_amortized(len, additional, elem_layout, alloc) } { handle_error(err); } } if self.needs_to_grow(len, additional, elem_layout) { unsafe { - do_reserve_and_handle(self, len, additional, elem_layout); + do_reserve_and_handle(self, len, additional, elem_layout, alloc); } } } @@ -669,11 +675,12 @@ impl RawVecInner { len: usize, additional: usize, elem_layout: Layout, + alloc: &dyn Allocator, ) -> Result<(), TryReserveError> { if self.needs_to_grow(len, additional, elem_layout) { // SAFETY: Precondition passed to caller unsafe { - self.grow_amortized(len, additional, elem_layout)?; + self.grow_amortized(len, additional, elem_layout, alloc)?; } } unsafe { @@ -688,9 +695,15 @@ impl RawVecInner { /// initially construct `self` /// - `elem_layout`'s size must be a multiple of its alignment #[cfg(not(no_global_oom_handling))] - unsafe fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) { + unsafe fn reserve_exact( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + alloc: &dyn Allocator, + ) { // SAFETY: Precondition passed to caller - if let Err(err) = unsafe { self.try_reserve_exact(len, additional, elem_layout) } { + if let Err(err) = unsafe { self.try_reserve_exact(len, additional, elem_layout, alloc) } { handle_error(err); } } @@ -704,11 +717,12 @@ impl RawVecInner { len: usize, additional: usize, elem_layout: Layout, + 
alloc: &dyn Allocator, ) -> Result<(), TryReserveError> { if self.needs_to_grow(len, additional, elem_layout) { // SAFETY: Precondition passed to caller unsafe { - self.grow_exact(len, additional, elem_layout)?; + self.grow_exact(len, additional, elem_layout, alloc)?; } } unsafe { @@ -718,19 +732,6 @@ impl RawVecInner { Ok(()) } - /// # Safety - /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to - /// initially construct `self` - /// - `elem_layout`'s size must be a multiple of its alignment - /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())` - #[cfg(not(no_global_oom_handling))] - #[inline] - unsafe fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) { - if let Err(err) = unsafe { self.shrink(cap, elem_layout) } { - handle_error(err); - } - } - #[inline] const fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool { additional > self.capacity(elem_layout.size()).wrapping_sub(len) @@ -756,6 +757,7 @@ impl RawVecInner { len: usize, additional: usize, elem_layout: Layout, + alloc: &dyn Allocator, ) -> Result<(), TryReserveError> { if elem_layout.size() == 0 { // Since we return a capacity of `usize::MAX` when the type size is @@ -766,7 +768,7 @@ impl RawVecInner { let cap = len.checked_add(additional).ok_or(CapacityOverflow)?; // SAFETY: preconditions passed to caller - let ptr = unsafe { self.finish_grow(cap, elem_layout)? }; + let ptr = unsafe { self.finish_grow(cap, elem_layout, alloc)? 
}; // SAFETY: `finish_grow` would have failed if `cap > isize::MAX` unsafe { self.set_ptr_and_cap(ptr, cap) }; @@ -780,10 +782,15 @@ impl RawVecInner { /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())` #[cfg(not(no_global_oom_handling))] #[inline] - unsafe fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> { + unsafe fn shrink( + &mut self, + cap: usize, + elem_layout: Layout, + alloc: &dyn Allocator, + ) -> Result<(), TryReserveError> { assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity"); // SAFETY: Just checked this isn't trying to grow - unsafe { self.shrink_unchecked(cap, elem_layout) } + unsafe { self.shrink_unchecked(cap, elem_layout, alloc) } } /// `shrink`, but without the capacity check. @@ -801,6 +808,7 @@ impl RawVecInner { &mut self, cap: usize, elem_layout: Layout, + alloc: &dyn Allocator, ) -> Result<(), TryReserveError> { // SAFETY: Precondition passed to caller let Some((ptr, layout)) = (unsafe { self.current_memory(elem_layout) }) else { @@ -811,7 +819,7 @@ impl RawVecInner { // for the T::IS_ZST case since current_memory() will have returned // None. if cap == 0 { - unsafe { self.alloc.deallocate(ptr, layout) }; + unsafe { alloc.deallocate(ptr, layout) }; self.ptr = unsafe { Unique::new_unchecked(ptr::without_provenance_mut(elem_layout.align())) }; self.cap = ZERO_CAP; @@ -821,7 +829,7 @@ impl RawVecInner { // overflowed earlier when capacity was larger. let new_size = elem_layout.size().unchecked_mul(cap); let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - self.alloc + alloc .shrink(ptr, layout, new_layout) .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })? }; @@ -833,18 +841,16 @@ impl RawVecInner { Ok(()) } - /// # Safety + /// # Safety + /// This should only be called once for a given `RawVecInner`. After this function any copies of this `RawVecInner` are invalidated. 
/// - /// This function deallocates the owned allocation, but does not update `ptr` or `cap` to - /// prevent double-free or use-after-free. Essentially, do not do anything with the caller - /// after this function returns. - /// Ideally this function would take `self` by move, but it cannot because it exists to be - /// called from a `Drop` impl. - unsafe fn deallocate(&mut self, elem_layout: Layout) { + /// This function deallocates the owned allocation. + #[inline] + unsafe fn deallocate(self, elem_layout: Layout, alloc: &dyn Allocator) { // SAFETY: Precondition passed to caller if let Some((ptr, layout)) = unsafe { self.current_memory(elem_layout) } { unsafe { - self.alloc.deallocate(ptr, layout); + alloc.deallocate(ptr, layout); } } } diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs index 15f48c03dc54c..d80d66c832fa3 100644 --- a/library/alloc/src/raw_vec/tests.rs +++ b/library/alloc/src/raw_vec/tests.rs @@ -42,9 +42,9 @@ fn allocator_param() { let a = BoundedAlloc { fuel: Cell::new(500) }; let mut v: RawVec = RawVec::with_capacity_in(50, a); - assert_eq!(v.inner.alloc.fuel.get(), 450); + assert_eq!(v.allocator().fuel.get(), 450); v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel) - assert_eq!(v.inner.alloc.fuel.get(), 250); + assert_eq!(v.allocator().fuel.get(), 250); } #[test] @@ -126,12 +126,18 @@ fn zst() { assert_eq!(v.try_reserve_exact(101, usize::MAX - 100), cap_err); zst_sanity(&v); - assert_eq!(unsafe { v.inner.grow_amortized(100, usize::MAX - 100, ZST::LAYOUT) }, cap_err); - assert_eq!(unsafe { v.inner.grow_amortized(101, usize::MAX - 100, ZST::LAYOUT) }, cap_err); + assert_eq!( + unsafe { v.inner.grow_amortized(100, usize::MAX - 100, ZST::LAYOUT, &Global) }, + cap_err + ); + assert_eq!( + unsafe { v.inner.grow_amortized(101, usize::MAX - 100, ZST::LAYOUT, &Global) }, + cap_err + ); zst_sanity(&v); - assert_eq!(unsafe { v.inner.grow_exact(100, usize::MAX - 100, ZST::LAYOUT) }, 
cap_err); - assert_eq!(unsafe { v.inner.grow_exact(101, usize::MAX - 100, ZST::LAYOUT) }, cap_err); + assert_eq!(unsafe { v.inner.grow_exact(100, usize::MAX - 100, ZST::LAYOUT, &Global) }, cap_err); + assert_eq!(unsafe { v.inner.grow_exact(101, usize::MAX - 100, ZST::LAYOUT, &Global) }, cap_err); zst_sanity(&v); } diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index 379e964f0a0ca..80fb27c2843aa 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -966,7 +966,10 @@ const impl Vec { pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } } +} +#[cfg(not(no_global_oom_handling))] +impl Vec { /// Appends an element to the back of a collection. /// /// # Panics diff --git a/library/alloctests/benches/vec.rs b/library/alloctests/benches/vec.rs index 1dab71fa1f4f4..0383cc1661c5d 100644 --- a/library/alloctests/benches/vec.rs +++ b/library/alloctests/benches/vec.rs @@ -1,8 +1,60 @@ use std::iter::repeat; +use std::mem::ManuallyDrop; +use std::ptr; use rand::RngCore; use test::{Bencher, black_box}; +fn do_bench_push(b: &mut Bencher, n: usize) { + b.iter(|| { + let mut v = Vec::new(); + for i in 0..n { + v.push(i); + } + v + }); +} + +#[bench] +fn bench_push_0100(b: &mut Bencher) { + do_bench_push(b, 100); +} + +#[bench] +fn bench_push_1000(b: &mut Bencher) { + do_bench_push(b, 1000); +} + +#[bench] +fn bench_push_10000(b: &mut Bencher) { + do_bench_push(b, 10000); +} + +fn do_bench_push_preallocated(b: &mut Bencher, n: usize) { + b.iter(|| { + let mut v = Vec::with_capacity(n); + for i in 0..n { + v.push(i); + } + black_box(v.as_slice()); + }); +} + +#[bench] +fn bench_push_preallocated_0100(b: &mut Bencher) { + do_bench_push_preallocated(b, 100); +} + +#[bench] +fn bench_push_preallocated_1000(b: &mut Bencher) { + do_bench_push_preallocated(b, 1000); +} + +#[bench] +fn bench_push_preallocated_10000(b: &mut Bencher) { + do_bench_push_preallocated(b, 
10000); +} + #[bench] fn bench_new(b: &mut Bencher) { b.iter(|| Vec::::new())