 
 use core::ptr::{self, Unique};
 use core::mem;
+use core::nonzero;
 use core::slice::{self, SliceExt};
-use core_alloc::Allocator;
+use core_alloc::{Allocator, Kind};
 use super::heap;
 use super::heap::Allocator as HeapAllocator;
-use super::oom;
 use super::boxed::Box;
 use core::ops::Drop;
 use core::cmp;
@@ -115,24 +115,19 @@ impl<T, A> RawVec<T, A> where A: Allocator {
     /// # Aborts
     ///
     /// Aborts on OOM
-    pub fn with_capacity_in(cap: usize, a: A) -> Self {
+    pub fn with_capacity_in(cap: usize, mut a: A) -> Self {
         unsafe {
-            let elem_size = mem::size_of::<T>();
-
-            let alloc_size = cap.checked_mul(elem_size).expect("capacity overflow");
-            alloc_guard(alloc_size);
-
-            // handles ZSTs and `cap = 0` alike
-            let ptr = if alloc_size == 0 {
-                heap::EMPTY as *mut u8
+            let ptr;
+            // handle ZSTs and `cap = 0` alike
+            if cap == 0 || mem::size_of::<T>() == 0 {
+                ptr = heap::EMPTY as *mut u8;
             } else {
-                let align = mem::align_of::<T>();
-                let ptr = heap::allocate(alloc_size, align);
-                if ptr.is_null() {
-                    oom()
-                }
-                ptr
-            };
+                let alloc_kind = Kind::array::<T>(cap)
+                    .unwrap_or_else(|| panic!("capacity overflow"));
+
+                alloc_guard(*alloc_kind.size());
+                ptr = *a.alloc(alloc_kind).unwrap_or_else(|_| a.oom());
+            }
 
             RawVec {
                 ptr: Unique::new(ptr as *mut _),
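
For readers who want to see the allocation pattern above outside the diff, here is a minimal standalone sketch of the same idea using today's stable `std::alloc` API, where `Layout::array::<T>(cap)` plays the role that `Kind::array::<T>(cap)` plays in this change (the checked `cap * size_of::<T>()` computation). The helper name `allocate_array` and the use of `NonNull::dangling()` in place of `heap::EMPTY` are illustrative choices, not part of this patch.

use std::alloc::{alloc, handle_alloc_error, Layout};
use std::mem;
use std::ptr::NonNull;

// Allocate storage for `cap` elements of `T`; zero-size requests get a
// well-aligned dangling pointer, mirroring the `heap::EMPTY` branch above.
fn allocate_array<T>(cap: usize) -> NonNull<T> {
    if cap == 0 || mem::size_of::<T>() == 0 {
        NonNull::dangling()
    } else {
        // Checked size/alignment computation: fails if `cap * size_of::<T>()` overflows.
        let layout = Layout::array::<T>(cap).expect("capacity overflow");
        let ptr = unsafe { alloc(layout) };
        // A null return signals allocation failure; bail out like `oom()`/`a.oom()` would.
        NonNull::new(ptr as *mut T).unwrap_or_else(|| handle_alloc_error(layout))
    }
}

fn main() {
    let p = allocate_array::<u32>(16);
    println!("allocated 16 u32s at {:p}", p);
    // A real owner would eventually call `std::alloc::dealloc` with the same layout.
}
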
@@ -232,34 +227,31 @@ impl<T, A> RawVec<T, A> where A: Allocator {
             // 0, getting to here necessarily means the RawVec is overfull.
             assert!(elem_size != 0, "capacity overflow");
 
-            let align = mem::align_of::<T>();
-
             let (new_cap, ptr) = if self.cap == 0 {
                 // skip to 4 because tiny Vec's are dumb; but not if that would cause overflow
                 let new_cap = if elem_size > (!0) / 8 {
                     1
                 } else {
                     4
                 };
-                let ptr = heap::allocate(new_cap * elem_size, align);
+                let ptr = self.a.alloc(Kind::array::<T>(new_cap).unwrap());
                 (new_cap, ptr)
             } else {
                 // Since we guarantee that we never allocate more than isize::MAX bytes,
                 // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow
                 let new_cap = 2 * self.cap;
-                let new_alloc_size = new_cap * elem_size;
-                alloc_guard(new_alloc_size);
-                let ptr = heap::reallocate(self.ptr() as *mut _,
-                                           self.cap * elem_size,
-                                           new_alloc_size,
-                                           align);
+                let new_alloc = Kind::array::<T>(new_cap).unwrap();
+                alloc_guard(*new_alloc.size());
+                let (ptr, cap) = (self.ptr(), self.cap);
+                let alloc = &mut self.a;
+                let ptr = alloc.realloc(nonzero::NonZero::new(ptr as *mut _),
+                                        Kind::array::<T>(cap).unwrap(),
+                                        new_alloc);
                 (new_cap, ptr)
             };
 
-            // If allocate or reallocate fail, we'll get `null` back
-            if ptr.is_null() {
-                oom()
-            }
+            // If allocate or reallocate fail, we'll get `Err` back
+            let ptr = *ptr.unwrap_or_else(|_| self.a.oom());
 
             self.ptr = Unique::new(ptr as *mut _);
             self.cap = new_cap;
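
The grow-in-place branch of `double` above (and the analogous branches in `reserve_exact`/`reserve` below) boils down to a layout-checked reallocation. A rough standalone equivalent with the stable `std::alloc` API, assuming `cap > 0` and a non-zero-sized `T` (the cases the code above handles separately); the helper name `grow_array` is illustrative only.

use std::alloc::{handle_alloc_error, realloc, Layout};
use std::ptr::NonNull;

// Double the capacity of an existing heap block holding `cap` elements of `T`.
// Safety: `ptr` must have been allocated with `Layout::array::<T>(cap)`.
unsafe fn grow_array<T>(ptr: NonNull<T>, cap: usize) -> (NonNull<T>, usize) {
    let new_cap = 2 * cap;
    let old_layout = Layout::array::<T>(cap).unwrap();
    let new_layout = Layout::array::<T>(new_cap).expect("capacity overflow");
    // `realloc` takes the old layout plus the new size in bytes.
    let raw = realloc(ptr.as_ptr() as *mut u8, old_layout, new_layout.size());
    match NonNull::new(raw as *mut T) {
        Some(p) => (p, new_cap),
        None => handle_alloc_error(new_layout),
    }
}
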
@@ -288,9 +280,6 @@ impl<T, A> RawVec<T, A> where A: Allocator {
     /// Aborts on OOM
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
         unsafe {
-            let elem_size = mem::size_of::<T>();
-            let align = mem::align_of::<T>();
-
             // NOTE: we don't early branch on ZSTs here because we want this
             // to actually catch "asking for more than usize::MAX" in that case.
             // If we make it past the first branch then we are guaranteed to
@@ -304,23 +293,24 @@ impl<T, A> RawVec<T, A> where A: Allocator {
 
             // Nothing we can really do about these checks :(
             let new_cap = used_cap.checked_add(needed_extra_cap).expect("capacity overflow");
-            let new_alloc_size = new_cap.checked_mul(elem_size).expect("capacity overflow");
-            alloc_guard(new_alloc_size);
 
+            let new_alloc = Kind::array::<T>(new_cap)
+                .unwrap_or_else(|| panic!("capacity overflow"));
+            alloc_guard(*new_alloc.size());
+
+            let old_ptr = self.ptr();
+            let alloc = &mut self.a;
             let ptr = if self.cap == 0 {
-                heap::allocate(new_alloc_size, align)
+                alloc.alloc(new_alloc)
             } else {
-                heap::reallocate(self.ptr() as *mut _,
-                                 self.cap * elem_size,
-                                 new_alloc_size,
-                                 align)
+                let old_alloc = Kind::array::<T>(self.cap).unwrap();
+                alloc.realloc(nonzero::NonZero::new(old_ptr as *mut _),
+                              old_alloc,
+                              new_alloc)
             };
 
-            // If allocate or reallocate fail, we'll get `null` back
-            if ptr.is_null() {
-                oom()
-            }
-
+            // If allocate or reallocate fail, we'll get `Err` back
+            let ptr = *ptr.unwrap_or_else(|_| alloc.oom());
             self.ptr = Unique::new(ptr as *mut _);
             self.cap = new_cap;
         }
@@ -372,9 +362,6 @@ impl<T, A> RawVec<T, A> where A: Allocator {
     /// ```
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
         unsafe {
-            let elem_size = mem::size_of::<T>();
-            let align = mem::align_of::<T>();
-
             // NOTE: we don't early branch on ZSTs here because we want this
             // to actually catch "asking for more than usize::MAX" in that case.
             // If we make it past the first branch then we are guaranteed to
@@ -396,24 +383,24 @@ impl<T, A> RawVec<T, A> where A: Allocator {
             // `double_cap` guarantees exponential growth.
             let new_cap = cmp::max(double_cap, required_cap);
 
-            let new_alloc_size = new_cap.checked_mul(elem_size).expect("capacity overflow");
+            let new_alloc = Kind::array::<T>(new_cap)
+                .unwrap_or_else(|| panic!("capacity overflow"));
             // FIXME: may crash and burn on over-reserve
-            alloc_guard(new_alloc_size);
+            alloc_guard(*new_alloc.size());
 
+            let ptr = self.ptr();
+            let alloc = &mut self.a;
             let ptr = if self.cap == 0 {
-                heap::allocate(new_alloc_size, align)
+                alloc.alloc(new_alloc)
             } else {
-                heap::reallocate(self.ptr() as *mut _,
-                                 self.cap * elem_size,
-                                 new_alloc_size,
-                                 align)
+                let old_alloc = Kind::array::<T>(self.cap).unwrap();
+                alloc.realloc(nonzero::NonZero::new(ptr as *mut _),
+                              old_alloc,
+                              new_alloc)
             };
 
-            // If allocate or reallocate fail, we'll get `null` back
-            if ptr.is_null() {
-                oom()
-            }
-
+            // If allocate or reallocate fail, we'll get `Err` back
+            let ptr = *ptr.unwrap_or_else(|_| alloc.oom());
             self.ptr = Unique::new(ptr as *mut _);
             self.cap = new_cap;
         }
@@ -431,7 +418,6 @@ impl<T, A> RawVec<T, A> where A: Allocator {
     /// Aborts on OOM.
     pub fn shrink_to_fit(&mut self, amount: usize) {
         let elem_size = mem::size_of::<T>();
-        let align = mem::align_of::<T>();
 
         // Set the `cap` because they might be about to promote to a `Box<[T]>`
         if elem_size == 0 {
@@ -449,13 +435,14 @@ impl<T, A> RawVec<T, A> where A: Allocator {
             unsafe {
                 // Overflow check is unnecessary as the vector is already at
                 // least this large.
-                let ptr = heap::reallocate(self.ptr() as *mut _,
-                                           self.cap * elem_size,
-                                           amount * elem_size,
-                                           align);
-                if ptr.is_null() {
-                    oom()
-                }
+                let old_kind = Kind::array::<T>(self.cap).unwrap();
+                let new_kind = Kind::array::<T>(amount).unwrap();
+                let ptr = self.ptr();
+                let alloc = &mut self.a;
+                let ptr = alloc.realloc(nonzero::NonZero::new(ptr as *mut _),
+                                        old_kind,
+                                        new_kind);
+                let ptr = *ptr.unwrap_or_else(|_| alloc.oom());
                 self.ptr = Unique::new(ptr as *mut _);
             }
             self.cap = amount;
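
A note on the recurring `let ptr = *ptr.unwrap_or_else(|_| alloc.oom());` lines: allocation failure now arrives as an `Err` from the allocator rather than as a null pointer, so the OOM handling moves from a manual `is_null()` check to `Result` handling. A small sketch of the two styles side by side, using a hypothetical `AllocErr` type purely for illustration (the real code aborts via `oom()` rather than panicking):

use std::ptr::NonNull;

// Stand-in error type for the allocator's failure case (illustrative only).
struct AllocErr;

// Old style: failure is a null pointer that the caller must remember to test.
fn require_non_null(ptr: *mut u8) -> NonNull<u8> {
    NonNull::new(ptr).unwrap_or_else(|| panic!("out of memory"))
}

// New style: failure is an explicit `Err`, so the happy path cannot silently
// keep using a failed allocation.
fn require_ok(res: Result<NonNull<u8>, AllocErr>) -> NonNull<u8> {
    res.unwrap_or_else(|_| panic!("out of memory"))
}
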