Commit 0f4ef00

Pass a Layout to oom
As discussed in #49668 (comment) and subsequent, there are use-cases where the OOM handler needs to know the size of the allocation that failed. The alignment might also be a cause for allocation failure, so providing it as well can be useful.
1 parent: 5015fa3
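
To make the motivation concrete: a Layout carries both the size and the alignment of the failed request, and the two are independent reasons an allocator can refuse. A small sketch using today's stable std::alloc::Layout (the same shape as the type passed here):

    use std::alloc::Layout;

    fn main() {
        // A 16-byte request can still be hard to satisfy if it demands a
        // 64 MiB alignment, so a handler wants to see both fields.
        let layout = Layout::from_size_align(16, 64 * 1024 * 1024).unwrap();
        println!("size = {} bytes, align = {}", layout.size(), layout.align());
    }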

10 files changed: +161, -112 lines

src/liballoc/alloc.rs (+7, -6)

@@ -115,7 +115,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         if !ptr.is_null() {
             ptr as *mut u8
         } else {
-            oom()
+            oom(layout)
         }
     }
 }
@@ -134,12 +134,13 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
 }

 #[rustc_allocator_nounwind]
-pub fn oom() -> ! {
-    extern {
+pub fn oom(layout: Layout) -> ! {
+    #[allow(improper_ctypes)]
+    extern "Rust" {
         #[lang = "oom"]
-        fn oom_impl() -> !;
+        fn oom_impl(layout: Layout) -> !;
     }
-    unsafe { oom_impl() }
+    unsafe { oom_impl(layout) }
 }

 #[cfg(test)]
@@ -154,7 +155,7 @@ mod tests {
         unsafe {
             let layout = Layout::from_size_align(1024, 1).unwrap();
             let ptr = Global.alloc_zeroed(layout.clone())
-                .unwrap_or_else(|_| oom());
+                .unwrap_or_else(|_| oom(layout));

             let mut i = ptr.cast::<u8>().as_ptr();
             let end = i.offset(layout.size() as isize);
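
A note on the #[allow(improper_ctypes)] added above: Layout is not an FFI-safe type by the lint's rules, and the diff shows the lint evidently fired even for this extern "Rust" block at the time, so declaring oom_impl(layout: Layout) needed the allow although both sides of the call are Rust. A standalone sketch of the same declaration shape (the symbol name is hypothetical and nothing calls it, so it is never linked):

    use std::alloc::Layout;

    #[allow(improper_ctypes)] // harmless on toolchains where the lint no longer fires here
    extern "Rust" {
        // Hypothetical symbol mirroring the shape of the oom_impl declaration.
        fn example_oom_impl(layout: Layout) -> !;
    }

    fn main() {} // the declaration is unused, so no symbol is needed at link time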

src/liballoc/arc.rs (+1, -1)

@@ -553,7 +553,7 @@ impl<T: ?Sized> Arc<T> {
         let layout = Layout::for_value(&*fake_ptr);

         let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| oom());
+            .unwrap_or_else(|_| oom(layout));

         // Initialize the real ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
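
Each call site in Arc, Rc, and RawVec now follows the same shape: build the Layout, attempt the allocation, and hand that same Layout to the handler on failure. A minimal sketch of that shape against today's stable API, where handle_alloc_error is the stable descendant of oom:

    use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

    fn main() {
        let layout = Layout::new::<[u64; 4]>();
        unsafe {
            let ptr = alloc(layout);
            if ptr.is_null() {
                // The failing request's size and alignment travel with it.
                handle_alloc_error(layout);
            }
            dealloc(ptr, layout);
        }
    }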

src/liballoc/raw_vec.rs (+82, -74)

@@ -96,14 +96,15 @@ impl<T, A: Alloc> RawVec<T, A> {
             NonNull::<T>::dangling().as_opaque()
         } else {
             let align = mem::align_of::<T>();
+            let layout = Layout::from_size_align(alloc_size, align).unwrap();
             let result = if zeroed {
-                a.alloc_zeroed(Layout::from_size_align(alloc_size, align).unwrap())
+                a.alloc_zeroed(layout)
             } else {
-                a.alloc(Layout::from_size_align(alloc_size, align).unwrap())
+                a.alloc(layout)
             };
             match result {
                 Ok(ptr) => ptr,
-                Err(_) => oom(),
+                Err(_) => oom(layout),
             }
         };

@@ -318,7 +319,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                                  new_size);
                     match ptr_res {
                         Ok(ptr) => (new_cap, ptr.cast().into()),
-                        Err(_) => oom(),
+                        Err(_) => oom(Layout::from_size_align_unchecked(new_size, cur.align())),
                     }
                 }
                 None => {
@@ -327,7 +328,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                 match self.a.alloc_array::<T>(new_cap) {
                     Ok(ptr) => (new_cap, ptr.into()),
-                    Err(_) => oom(),
+                    Err(_) => oom(Layout::array::<T>(new_cap).unwrap()),
                 }
             }
         };
@@ -389,37 +390,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
         -> Result<(), CollectionAllocErr> {

-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they gave a bad `used_cap`.
-            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
-                return Ok(());
-            }
-
-            // Nothing we can really do about these checks :(
-            let new_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
-
-            alloc_guard(new_layout.size())?;
-
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
-
-            self.ptr = res?.cast().into();
-            self.cap = new_cap;
-
-            Ok(())
-        }
+        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
     }

     /// Ensures that the buffer contains at least enough space to hold
@@ -443,9 +414,9 @@ impl<T, A: Alloc> RawVec<T, A> {
     ///
     /// Aborts on OOM
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.try_reserve_exact(used_cap, needed_extra_cap) {
+        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => oom(),
+            Err(AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
         }
     }
@@ -467,37 +438,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// The same as `reserve`, but returns on errors instead of panicking or aborting.
     pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
         -> Result<(), CollectionAllocErr> {
-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they give a bad `used_cap`
-            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
-                return Ok(());
-            }
-
-            let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?;
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
-
-            // FIXME: may crash and burn on over-reserve
-            alloc_guard(new_layout.size())?;
-
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
-
-            self.ptr = res?.cast().into();
-            self.cap = new_cap;
-
-            Ok(())
-        }
+        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
     }

     /// Ensures that the buffer contains at least enough space to hold
@@ -553,12 +494,12 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// # }
     /// ```
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.try_reserve(used_cap, needed_extra_cap) {
+        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => oom(),
+            Err(AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
-        }
-    }
+        }
+    }
     /// Attempts to ensure that the buffer contains at least enough space to hold
     /// `used_cap + needed_extra_cap` elements. If it doesn't already have
     /// enough capacity, will reallocate in place enough space plus comfortable slack
@@ -670,14 +611,81 @@ impl<T, A: Alloc> RawVec<T, A> {
                                      old_layout,
                                      new_size) {
                     Ok(p) => self.ptr = p.cast().into(),
-                    Err(_) => oom(),
+                    Err(_) => oom(Layout::from_size_align_unchecked(new_size, align)),
                 }
             }
             self.cap = amount;
         }
     }
 }

+enum Fallibility {
+    Fallible,
+    Infallible,
+}
+
+use self::Fallibility::*;
+
+enum ReserveStrategy {
+    Exact,
+    Amortized,
+}
+
+use self::ReserveStrategy::*;
+
+impl<T, A: Alloc> RawVec<T, A> {
+    fn reserve_internal(
+        &mut self,
+        used_cap: usize,
+        needed_extra_cap: usize,
+        fallibility: Fallibility,
+        strategy: ReserveStrategy,
+    ) -> Result<(), CollectionAllocErr> {
+        unsafe {
+            use alloc::AllocErr;
+
+            // NOTE: we don't early branch on ZSTs here because we want this
+            // to actually catch "asking for more than usize::MAX" in that case.
+            // If we make it past the first branch then we are guaranteed to
+            // panic.
+
+            // Don't actually need any more capacity.
+            // Wrapping in case they gave a bad `used_cap`.
+            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+                return Ok(());
+            }
+
+            // Nothing we can really do about these checks :(
+            let new_cap = match strategy {
+                Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?,
+                Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
+            };
+            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
+
+            alloc_guard(new_layout.size())?;
+
+            let res = match self.current_layout() {
+                Some(layout) => {
+                    debug_assert!(new_layout.align() == layout.align());
+                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
+                }
+                None => self.a.alloc(new_layout),
+            };
+
+            match (&res, fallibility) {
+                (Err(AllocErr), Infallible) => oom(new_layout),
+                _ => {}
+            }
+
+            self.ptr = res?.cast().into();
+            self.cap = new_cap;
+
+            Ok(())
+        }
+    }
+
+}
+
 impl<T> RawVec<T, Global> {
     /// Converts the entire buffer into `Box<[T]>`.
     ///
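
The raw_vec.rs change is mostly a deduplication: the four reserve entry points collapse into one reserve_internal parameterized by Fallibility and ReserveStrategy, and on the infallible path the function itself diverges via oom(new_layout), which is why the wrappers' AllocErr arms become unreachable!(). A standalone sketch of the same pattern, with hypothetical types standing in for the real RawVec internals:

    #[derive(Clone, Copy)]
    enum Fallibility { Fallible, Infallible }

    #[derive(Clone, Copy)]
    enum ReserveStrategy { Exact, Amortized }

    #[derive(Debug)]
    enum ReserveError { CapacityOverflow, AllocErr }

    struct Buf { cap: usize }

    impl Buf {
        fn reserve_internal(
            &mut self,
            extra: usize,
            fallibility: Fallibility,
            strategy: ReserveStrategy,
        ) -> Result<(), ReserveError> {
            let new_cap = match strategy {
                ReserveStrategy::Exact => self.cap.checked_add(extra),
                // Amortized growth: at least double, as the real RawVec does.
                ReserveStrategy::Amortized => self
                    .cap
                    .checked_add(extra)
                    .map(|c| c.max(self.cap.saturating_mul(2))),
            }
            .ok_or(ReserveError::CapacityOverflow)?;

            if !try_grow(new_cap) {
                match fallibility {
                    // The infallible path diverges here (the real code calls
                    // oom(new_layout)), so callers never observe AllocErr.
                    Fallibility::Infallible => panic!("allocation failed"),
                    Fallibility::Fallible => return Err(ReserveError::AllocErr),
                }
            }
            self.cap = new_cap;
            Ok(())
        }

        fn reserve(&mut self, extra: usize) {
            match self.reserve_internal(extra, Fallibility::Infallible, ReserveStrategy::Amortized) {
                Err(ReserveError::CapacityOverflow) => panic!("capacity overflow"),
                Err(ReserveError::AllocErr) => unreachable!(),
                Ok(()) => {}
            }
        }

        fn try_reserve(&mut self, extra: usize) -> Result<(), ReserveError> {
            self.reserve_internal(extra, Fallibility::Fallible, ReserveStrategy::Amortized)
        }
    }

    // Stand-in for the real allocation; always succeeds in this sketch.
    fn try_grow(_new_cap: usize) -> bool { true }

    fn main() {
        let mut b = Buf { cap: 0 };
        b.reserve(10);
        assert!(b.try_reserve(100).is_ok());
        assert!(b.cap >= 110);
    }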

src/liballoc/rc.rs (+1, -1)

@@ -668,7 +668,7 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::for_value(&*fake_ptr);

         let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| oom());
+            .unwrap_or_else(|_| oom(layout));

         // Initialize the real RcBox
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;

src/libstd/alloc.rs (+2, -2)

@@ -13,14 +13,14 @@
 #![unstable(issue = "32838", feature = "allocator_api")]

 #[doc(inline)] #[allow(deprecated)] pub use alloc_crate::alloc::Heap;
-#[doc(inline)] pub use alloc_crate::alloc::{Global, oom};
+#[doc(inline)] pub use alloc_crate::alloc::{Global, Layout, oom};
 #[doc(inline)] pub use alloc_system::System;
 #[doc(inline)] pub use core::alloc::*;

 #[cfg(not(test))]
 #[doc(hidden)]
 #[lang = "oom"]
-pub extern fn rust_oom() -> ! {
+pub extern fn rust_oom(_: Layout) -> ! {
     rtabort!("memory allocation failed");
 }
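
The default handler defined here still ignores its argument (_: Layout) and aborts with a fixed message, but a Layout-aware handler can now report what was actually requested. A sketch of such a report (a hypothetical standalone function, not the actual lang-item wiring):

    use std::alloc::Layout;

    #[allow(dead_code)] // illustration only; nothing routes OOM here
    fn rust_oom_verbose(layout: Layout) -> ! {
        eprintln!(
            "memory allocation of {} bytes (align {}) failed",
            layout.size(),
            layout.align()
        );
        std::process::abort();
    }

    fn main() {}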

src/libstd/collections/hash/map.rs (+29, -10)

@@ -11,7 +11,7 @@
 use self::Entry::*;
 use self::VacantEntryState::*;

-use alloc::{CollectionAllocErr, oom};
+use alloc::CollectionAllocErr;
 use cell::Cell;
 use borrow::Borrow;
 use cmp::max;
@@ -23,8 +23,10 @@ use mem::{self, replace};
 use ops::{Deref, Index};
 use sys;

-use super::table::{self, Bucket, EmptyBucket, FullBucket, FullBucketMut, RawTable, SafeHash};
+use super::table::{self, Bucket, EmptyBucket, Fallibility, FullBucket, FullBucketMut, RawTable,
+                   SafeHash};
 use super::table::BucketState::{Empty, Full};
+use super::table::Fallibility::{Fallible, Infallible};

 const MIN_NONZERO_RAW_CAPACITY: usize = 32; // must be a power of two

@@ -783,11 +785,11 @@ impl<K, V, S> HashMap<K, V, S>
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn reserve(&mut self, additional: usize) {
-        match self.try_reserve(additional) {
+        match self.reserve_internal(additional, Infallible) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr) => oom(),
+            Err(CollectionAllocErr::AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
-        }
+        }
     }

     /// Tries to reserve capacity for at least `additional` more elements to be inserted
@@ -809,17 +811,24 @@ impl<K, V, S> HashMap<K, V, S>
     /// ```
     #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
     pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+        self.reserve_internal(additional, Fallible)
+    }
+
+    fn reserve_internal(&mut self, additional: usize, fallibility: Fallibility)
+        -> Result<(), CollectionAllocErr> {
+
         let remaining = self.capacity() - self.len(); // this can't overflow
         if remaining < additional {
-            let min_cap = self.len().checked_add(additional)
+            let min_cap = self.len()
+                .checked_add(additional)
                 .ok_or(CollectionAllocErr::CapacityOverflow)?;
             let raw_cap = self.resize_policy.try_raw_capacity(min_cap)?;
-            self.try_resize(raw_cap)?;
+            self.try_resize(raw_cap, fallibility)?;
         } else if self.table.tag() && remaining <= self.len() {
             // Probe sequence is too long and table is half full,
             // resize early to reduce probing length.
             let new_capacity = self.table.capacity() * 2;
-            self.try_resize(new_capacity)?;
+            self.try_resize(new_capacity, fallibility)?;
         }
         Ok(())
     }
@@ -831,11 +840,21 @@ impl<K, V, S> HashMap<K, V, S>
     /// 2) Ensure `new_raw_cap` is a power of two or zero.
     #[inline(never)]
     #[cold]
-    fn try_resize(&mut self, new_raw_cap: usize) -> Result<(), CollectionAllocErr> {
+    fn try_resize(
+        &mut self,
+        new_raw_cap: usize,
+        fallibility: Fallibility,
+    ) -> Result<(), CollectionAllocErr> {
         assert!(self.table.size() <= new_raw_cap);
         assert!(new_raw_cap.is_power_of_two() || new_raw_cap == 0);

-        let mut old_table = replace(&mut self.table, RawTable::try_new(new_raw_cap)?);
+        let mut old_table = replace(
+            &mut self.table,
+            match fallibility {
+                Infallible => RawTable::new(new_raw_cap),
+                Fallible => RawTable::try_new(new_raw_cap)?,
+            }
+        );
         let old_size = old_table.size();

         if old_table.size() == 0 {
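
On the HashMap side the same split falls out of reserve_internal: reserve panics on capacity overflow and aborts the process on OOM, while try_reserve surfaces the failure to the caller. Usage of the fallible path (unstable behind the try_reserve feature at the time of this commit, stabilized since):

    use std::collections::HashMap;

    fn main() {
        let mut map: HashMap<u32, String> = HashMap::new();

        // Fallible path: returns an error instead of aborting the process.
        match map.try_reserve(1024) {
            Ok(()) => println!("reserved capacity for 1024 more entries"),
            Err(e) => eprintln!("could not reserve: {:?}", e),
        }

        // Infallible path: panics on capacity overflow, aborts on OOM.
        map.reserve(1024);
    }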
