@@ -72,6 +72,9 @@ pub(crate) trait Allocation<T>: Debug {
7272/// A generic typed allocation wrapping a RawAllocation.
7373///
7474/// This is currently the only Allocation implementation, since it is shared by all allocators.
75+ ///
76+ /// # Invariants
77+ /// The allocation at `alloc` must have a size equal to or greater than `alloc_size` plus `debug_offset` plus `padding`.
7578pub ( crate ) struct GenericAlloc < T , U : RawAllocation > {
7679 alloc : U ,
7780 alloc_size : usize ,
@@ -83,14 +86,17 @@ pub(crate) struct GenericAlloc<T, U: RawAllocation> {
8386}
8487
8588impl < T , U : RawAllocation > Allocation < T > for GenericAlloc < T , U > {
89+ /// Returns a pointer to the inner (usable) part of the allocation.
8690 fn ptr ( & self ) -> Option < NonNull < T > > {
87- self . alloc
88- . ptr ( )
89- . map ( |p| unsafe { NonNull :: new_unchecked ( p . as_ptr ( ) . add ( self . debug_offset ) as * mut T ) } )
91+ // SAFETY: self.debug_offset is always within the allocation per the invariant, so is safe to add
92+ // to the base pointer.
93+ unsafe { self . alloc . ptr ( ) . map ( |p| p . add ( self . debug_offset ) . cast ( ) ) }
9094 }
95+ /// Returns the GPU pointer to the inner (usable) part of the allocation.
9196 fn gpu_ptr ( & self ) -> u64 {
9297 self . alloc . gpu_ptr ( ) + self . debug_offset as u64
9398 }
99+ /// Returns the size of the inner (usable) part of the allocation.
94100 fn size ( & self ) -> usize {
95101 self . alloc_size
96102 }
@@ -133,6 +139,8 @@ impl<T, U: RawAllocation> Drop for GenericAlloc<T, U> {
133139 let debug_len = mem:: size_of :: < AllocDebugData > ( ) ;
134140 if self . debug_offset >= debug_len {
135141 if let Some ( p) = self . alloc . ptr ( ) {
142+                 // SAFETY: self.debug_offset is always within the allocation per
143+                 // the invariant, and greater than debug_len as checked above.
136144 unsafe {
137145 let p = p. as_ptr ( ) . add ( self . debug_offset - debug_len) ;
138146 ( p as * mut u32 ) . write ( STATE_DEAD ) ;
@@ -141,11 +149,14 @@ impl<T, U: RawAllocation> Drop for GenericAlloc<T, U> {
141149 }
142150 if debug_enabled ( DebugFlags :: FillAllocations ) {
143151 if let Some ( p) = self . ptr ( ) {
152+ // SAFETY: Writing to our inner base pointer with our known inner size is safe.
144153 unsafe { ( p. as_ptr ( ) as * mut u8 ) . write_bytes ( 0xde , self . size ( ) ) } ;
145154 }
146155 }
147156 if self . padding != 0 {
148157 if let Some ( p) = self . ptr ( ) {
158+ // SAFETY: Per the invariant, we have at least `self.padding` bytes trailing
159+ // the inner base pointer, after `size()` bytes.
149160 let guard = unsafe {
150161 core:: slice:: from_raw_parts (
151162 ( p. as_ptr ( ) as * mut u8 as * const u8 ) . add ( self . size ( ) ) ,
@@ -278,6 +289,8 @@ pub(crate) trait Allocator {
278289 debug. name [ ..len] . copy_from_slice ( & name[ ..len] ) ;
279290
280291 if let Some ( p) = alloc. ptr ( ) {
292+ // SAFETY: Per the size calculations above, this pointer math and the
293+ // writes never exceed the allocation size.
281294 unsafe {
282295 let p = p. as_ptr ( ) ;
283296 p. write_bytes ( 0x42 , debug_offset - 2 * debug_len) ;
@@ -311,12 +324,15 @@ pub(crate) trait Allocator {
311324
312325 if debug_enabled ( DebugFlags :: FillAllocations ) {
313326 if let Some ( p) = ret. ptr ( ) {
327+ // SAFETY: Writing to our inner base pointer with our known inner size is safe.
314328 unsafe { ( p. as_ptr ( ) as * mut u8 ) . write_bytes ( 0xaa , ret. size ( ) ) } ;
315329 }
316330 }
317331
318332 if padding != 0 {
319333 if let Some ( p) = ret. ptr ( ) {
334+                 // SAFETY: Per the invariant, we have at least `padding` bytes trailing
335+                 // the inner base pointer, after `ret.size()` bytes.
320336 let guard = unsafe {
321337 core:: slice:: from_raw_parts_mut (
322338 ( p. as_ptr ( ) as * mut u8 ) . add ( ret. size ( ) ) ,
@@ -397,6 +413,7 @@ pub(crate) struct SimpleAllocation {
397413
398414/// SAFETY: `SimpleAllocation` just points to raw memory and should be safe to send across threads.
399415unsafe impl Send for SimpleAllocation { }
416+ /// SAFETY: `SimpleAllocation` just points to raw memory and should be safe to share across threads.
400417unsafe impl Sync for SimpleAllocation { }
401418
402419impl Drop for SimpleAllocation {
@@ -511,6 +528,7 @@ impl Allocator for SimpleAllocator {
511528
512529 let iova = mapping. iova ( ) ;
513530
531+         // SAFETY: Per the `size_aligned` math above, `offset` is always within the allocation.
514532 let ptr = unsafe { p. add ( offset) } ;
515533 let gpu_ptr = iova + offset as u64 ;
516534
@@ -542,8 +560,9 @@ pub(crate) struct HeapAllocationInner {
542560 real_size : usize ,
543561}
544562
545- /// SAFETY: `SimpleAllocation ` just points to raw memory and should be safe to send across threads.
563+ /// SAFETY: `HeapAllocationInner ` just points to raw memory and should be safe to send across threads.
546564unsafe impl Send for HeapAllocationInner { }
565+ /// SAFETY: `HeapAllocationInner` just points to raw memory and should be safe to share between threads.
547566unsafe impl Sync for HeapAllocationInner { }
548567
549568/// Outer view of a heap allocation.
@@ -751,15 +770,14 @@ impl HeapAllocator {
751770 let gpu_ptr = self . top ;
752771 let mapping = obj
753772 . map_at ( & self . vm , gpu_ptr, self . prot , self . cpu_maps )
754- . map_err ( |err| {
773+ . inspect_err ( |err| {
755774 dev_err ! (
756775 self . dev. as_ref( ) ,
757776 "HeapAllocator[{}]::add_block: Failed to map at {:#x} ({:?})\n " ,
758777 & * self . name,
759778 gpu_ptr,
760779 err
761780 ) ;
762- err
763781 } ) ?;
764782
765783 self . mm
@@ -939,6 +957,7 @@ impl HeapAllocator {
939957 assert ! ( obj_start <= start) ;
940958 assert ! ( obj_start + obj_size as u64 >= end) ;
941959 node. as_mut ( ) . inner_mut ( ) . ptr =
960+ // SAFETY: Per the asserts above, this offset is always within the allocation.
942961 NonNull :: new ( unsafe { p. add ( ( start - obj_start) as usize ) } ) ;
943962 mod_dev_dbg ! (
944963 self . dev,
0 commit comments