Bug 1902540 - Crashtest. r=dholbert
[gecko.git] / third_party / rust / gpu-alloc / src / buddy.rs
blobdea303b948a859bf9d0e7d7733e3a0b4c360673f
1 use {\r
2     crate::{\r
3         align_up, error::AllocationError, heap::Heap, slab::Slab, unreachable_unchecked,\r
4         util::try_arc_unwrap, MemoryBounds,\r
5     },\r
6     alloc::{sync::Arc, vec::Vec},\r
7     core::{convert::TryFrom as _, mem::replace, ptr::NonNull},\r
8     gpu_alloc_types::{AllocationFlags, DeviceMapError, MemoryDevice, MemoryPropertyFlags},\r
9 };\r
/// A single allocation handed out by the buddy allocator.
///
/// Carries shared ownership of the backing device memory plus the
/// offset/size of this block inside it; `chunk` and `index` identify
/// the owning chunk and the pair slot needed to release the block.
#[derive(Debug)]
pub(crate) struct BuddyBlock<M> {
    /// Shared handle to the device memory object backing this block.
    pub memory: Arc<M>,
    /// Mapped pointer to this block's bytes, present only when the
    /// memory type is host-visible.
    pub ptr: Option<NonNull<u8>>,
    /// Offset of this block from the start of the chunk's memory.
    pub offset: u64,
    /// Size of the block in bytes (always a power of two).
    pub size: u64,
    /// Index of the owning chunk in the allocator's chunk slab.
    pub chunk: usize,
    /// Pair index shifted left by one, with the low bit selecting the
    /// left/right half; consumed by `dealloc`.
    pub index: usize,
}
// SAFETY: `BuddyBlock` is only non-auto Send/Sync because of the raw
// `NonNull<u8>` mapped pointer; thread-safety is therefore delegated to
// the memory object `M` via the bounds below.
unsafe impl<M> Sync for BuddyBlock<M> where M: Sync {}
unsafe impl<M> Send for BuddyBlock<M> where M: Send {}
/// State of one buddy pair (two sibling blocks of the same size).
#[derive(Clone, Copy, Debug)]
enum PairState {
    /// Both halves of the pair are allocated.
    Exhausted,
    /// Exactly one half is free; the entry is linked into the intrusive
    /// doubly-linked list of ready pairs for this size class.
    Ready {
        /// Which half (left or right) is currently free.
        ready: Side,
        /// Index of the next ready pair in the list.
        next: usize,
        /// Index of the previous ready pair in the list.
        prev: usize,
    },
}
34 impl PairState {\r
35     unsafe fn replace_next(&mut self, value: usize) -> usize {\r
36         match self {\r
37             PairState::Exhausted => unreachable_unchecked(),\r
38             PairState::Ready { next, .. } => replace(next, value),\r
39         }\r
40     }\r
42     unsafe fn replace_prev(&mut self, value: usize) -> usize {\r
43         match self {\r
44             PairState::Exhausted => unreachable_unchecked(),\r
45             PairState::Ready { prev, .. } => replace(prev, value),\r
46         }\r
47     }\r
48 }\r
/// Identifies one half of a buddy pair.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Side {
    /// First half, at the pair's base offset.
    Left,
    /// Second half, one block size past the base offset.
    Right,
}
// Bring `Left`/`Right` into scope for the match arms below.
use Side::*;
/// Slab entry describing one buddy pair within a size class.
#[derive(Debug)]
struct PairEntry {
    /// Allocation state plus ready-list links.
    state: PairState,
    /// Index of the chunk this pair lives in.
    chunk: usize,
    /// Offset of the pair (its left half) within the chunk.
    offset: u64,
    /// Index of the parent block's slot one size class up, or `None`
    /// for a top-level pair spanning the whole chunk.
    parent: Option<usize>,
}
/// Result of acquiring a block from a size class: the chunk it belongs
/// to, its offset in that chunk, and the encoded pair-slot index
/// (`pair_index << 1 | side_bit`).
///
/// Derives `Debug` for consistency with the other book-keeping types in
/// this module.
#[derive(Debug)]
struct SizeBlockEntry {
    /// Index of the owning chunk.
    chunk: usize,
    /// Byte offset of the block within the chunk.
    offset: u64,
    /// Pair index shifted left by one, with the low bit selecting the
    /// left (0) or right (1) half.
    index: usize,
}
/// Book-keeping for all buddy pairs of a single block size class.
#[derive(Debug)]
struct Size {
    /// Index into `pairs` of the head of the ready list, or
    /// `pairs.len()` when no pair has a free half.
    next_ready: usize,
    /// Slab holding every pair of this size class.
    pairs: Slab<PairEntry>,
}
/// Outcome of releasing one block back into its size class.
#[derive(Debug)]
enum Release {
    /// Buddy is still allocated; nothing further to do.
    None,
    /// Both halves were free: the pair was removed, and the parent's
    /// half (at this slot index) must be released one size class up.
    Parent(usize),
    /// Both halves of a top-level pair were free: the whole chunk (by
    /// this index) can be returned to the device.
    Chunk(usize),
}
83 impl Size {\r
84     fn new() -> Self {\r
85         Size {\r
86             pairs: Slab::new(),\r
87             next_ready: 0,\r
88         }\r
89     }\r
    /// Inserts a brand-new pair for this size class and immediately
    /// acquires its left half.
    ///
    /// Returns the `SizeBlockEntry` for the acquired left block; the
    /// right half is published as the sole member of the ready list.
    ///
    /// # Safety
    /// Must only be called when this size class has no ready pair
    /// (`next_ready >= pairs.len()`), i.e. after `acquire` returned
    /// `None`; otherwise behavior is undefined.
    unsafe fn add_pair_and_acquire_left(
        &mut self,
        chunk: usize,
        offset: u64,
        parent: Option<usize>,
    ) -> SizeBlockEntry {
        // Caller contract: the ready list is empty.
        if self.next_ready < self.pairs.len() {
            unreachable_unchecked()
        }

        let index = self.pairs.insert(PairEntry {
            state: PairState::Exhausted,
            chunk,
            offset,
            parent,
        });

        // Link the fresh pair as the only ready entry (it is its own
        // `next` and `prev`) with the right half free.
        let entry = self.pairs.get_unchecked_mut(index);
        entry.state = PairState::Ready {
            next: index,
            prev: index,
            ready: Right, // Left is allocated.
        };
        self.next_ready = index;

        SizeBlockEntry {
            chunk,
            offset,
            index: index << 1, // low bit 0 == left half
        }
    }
    /// Acquires one free block of this size class, if any pair has a
    /// free half.
    ///
    /// `size` is this class's block size, used to compute the returned
    /// offset when the right half is taken. Returns `None` when the
    /// ready list is empty.
    fn acquire(&mut self, size: u64) -> Option<SizeBlockEntry> {
        // `next_ready == pairs.len()` encodes an empty ready list.
        if self.next_ready >= self.pairs.len() {
            return None;
        }

        let ready = self.next_ready;

        let entry = unsafe { self.pairs.get_unchecked_mut(ready) };
        let chunk = entry.chunk;
        let offset = entry.offset;

        let bit = match entry.state {
            // Entries on the ready list are always `Ready`.
            PairState::Exhausted => unsafe { unreachable_unchecked() },
            PairState::Ready { ready, next, prev } => {
                // Taking the free half exhausts the pair; unlink it
                // from the doubly-linked ready list.
                entry.state = PairState::Exhausted;

                if prev == self.next_ready {
                    // The only ready entry.
                    debug_assert_eq!(next, self.next_ready);
                    self.next_ready = self.pairs.len();
                } else {
                    let prev_entry = unsafe { self.pairs.get_unchecked_mut(prev) };
                    let prev_next = unsafe { prev_entry.state.replace_next(next) };
                    debug_assert_eq!(prev_next, self.next_ready);

                    let next_entry = unsafe { self.pairs.get_unchecked_mut(next) };
                    let next_prev = unsafe { next_entry.state.replace_prev(prev) };
                    debug_assert_eq!(next_prev, self.next_ready);

                    self.next_ready = next;
                }

                // 0 for the left half, 1 for the right half.
                match ready {
                    Left => 0,
                    Right => 1,
                }
            }
        };

        Some(SizeBlockEntry {
            chunk,
            // The right half starts one `size` past the pair's offset.
            offset: offset + bit as u64 * size,
            index: (ready << 1) | bit as usize,
        })
    }
169     fn release(&mut self, index: usize) -> Release {\r
170         let side = match index & 1 {\r
171             0 => Side::Left,\r
172             1 => Side::Right,\r
173             _ => unsafe { unreachable_unchecked() },\r
174         };\r
175         let entry_index = index >> 1;\r
177         let len = self.pairs.len();\r
179         let entry = self.pairs.get_mut(entry_index);\r
181         let chunk = entry.chunk;\r
182         let offset = entry.offset;\r
183         let parent = entry.parent;\r
185         match entry.state {\r
186             PairState::Exhausted => {\r
187                 if self.next_ready == len {\r
188                     entry.state = PairState::Ready {\r
189                         ready: side,\r
190                         next: entry_index,\r
191                         prev: entry_index,\r
192                     };\r
193                     self.next_ready = entry_index;\r
194                 } else {\r
195                     debug_assert!(self.next_ready < len);\r
197                     let next = self.next_ready;\r
198                     let next_entry = unsafe { self.pairs.get_unchecked_mut(next) };\r
199                     let prev = unsafe { next_entry.state.replace_prev(entry_index) };\r
201                     let prev_entry = unsafe { self.pairs.get_unchecked_mut(prev) };\r
202                     let prev_next = unsafe { prev_entry.state.replace_next(entry_index) };\r
203                     debug_assert_eq!(prev_next, next);\r
205                     let entry = unsafe { self.pairs.get_unchecked_mut(entry_index) };\r
206                     entry.state = PairState::Ready {\r
207                         ready: side,\r
208                         next,\r
209                         prev,\r
210                     };\r
211                 }\r
212                 Release::None\r
213             }\r
215             PairState::Ready { ready, .. } if ready == side => {\r
216                 panic!("Attempt to dealloate already free block")\r
217             }\r
219             PairState::Ready { next, prev, .. } => {\r
220                 unsafe {\r
221                     self.pairs.remove_unchecked(entry_index);\r
222                 }\r
224                 if prev == entry_index {\r
225                     debug_assert_eq!(next, entry_index);\r
226                     self.next_ready = self.pairs.len();\r
227                 } else {\r
228                     let prev_entry = unsafe { self.pairs.get_unchecked_mut(prev) };\r
229                     let prev_next = unsafe { prev_entry.state.replace_next(next) };\r
230                     debug_assert_eq!(prev_next, entry_index);\r
232                     let next_entry = unsafe { self.pairs.get_unchecked_mut(next) };\r
233                     let next_prev = unsafe { next_entry.state.replace_prev(prev) };\r
234                     debug_assert_eq!(next_prev, entry_index);\r
236                     self.next_ready = next;\r
237                 }\r
239                 match parent {\r
240                     Some(parent) => Release::Parent(parent),\r
241                     None => {\r
242                         debug_assert_eq!(offset, 0);\r
243                         Release::Chunk(chunk)\r
244                     }\r
245                 }\r
246             }\r
247         }\r
248     }\r
/// One device-memory allocation subdivided by the buddy scheme.
#[derive(Debug)]
struct Chunk<M> {
    /// Shared handle to the device memory; also cloned into every
    /// `BuddyBlock` carved from this chunk.
    memory: Arc<M>,
    /// Base mapped pointer when the memory type is host-visible.
    ptr: Option<NonNull<u8>>,
    /// Total chunk size in bytes.
    size: u64,
/// Power-of-two buddy allocator over device-memory chunks.
#[derive(Debug)]
pub(crate) struct BuddyAllocator<M> {
    /// Smallest block size this allocator hands out (power of two).
    minimal_size: u64,
    /// All live chunks of device memory.
    chunks: Slab<Chunk<M>>,
    /// Per-size-class state; `sizes[i]` manages blocks of
    /// `minimal_size << i` bytes.
    sizes: Vec<Size>,
    /// Memory type index passed to the device when allocating chunks.
    memory_type: u32,
    /// Property flags of this memory type (e.g. `HOST_VISIBLE`).
    props: MemoryPropertyFlags,
    /// Alignment mask OR-ed into every allocation's align mask.
    atom_mask: u64,
// SAFETY: `BuddyAllocator` only stores `M` behind `Arc` plus raw mapped
// pointers into that memory; thread-safety is delegated to `M` via the
// bounds below.
unsafe impl<M> Sync for BuddyAllocator<M> where M: Sync {}
unsafe impl<M> Send for BuddyAllocator<M> where M: Send {}
271 impl<M> BuddyAllocator<M>\r
272 where\r
273     M: MemoryBounds + 'static,\r
275     pub fn new(\r
276         minimal_size: u64,\r
277         initial_dedicated_size: u64,\r
278         memory_type: u32,\r
279         props: MemoryPropertyFlags,\r
280         atom_mask: u64,\r
281     ) -> Self {\r
282         assert!(\r
283             minimal_size.is_power_of_two(),\r
284             "Minimal allocation size of buddy allocator must be power of two"\r
285         );\r
286         assert!(\r
287             initial_dedicated_size.is_power_of_two(),\r
288             "Dedicated allocation size of buddy allocator must be power of two"\r
289         );\r
291         let initial_sizes = (initial_dedicated_size\r
292             .trailing_zeros()\r
293             .saturating_sub(minimal_size.trailing_zeros())) as usize;\r
295         BuddyAllocator {\r
296             minimal_size,\r
297             chunks: Slab::new(),\r
298             sizes: (0..initial_sizes).map(|_| Size::new()).collect(),\r
299             memory_type,\r
300             props,\r
301             atom_mask: atom_mask | (minimal_size - 1),\r
302         }\r
303     }\r
    /// Allocates a buddy block of at least `size` bytes honoring
    /// `align_mask` (combined with the allocator's atom mask).
    ///
    /// The request is rounded up to an aligned power of two and clamped
    /// to at least `minimal_size`. Progressively larger size classes
    /// are searched for a free block, which is then split back down to
    /// the requested size; if none exists, a fresh chunk is allocated
    /// from `device` (and persistently mapped when the memory type is
    /// host-visible).
    ///
    /// # Errors
    /// `OutOfDeviceMemory` on size overflow or device exhaustion,
    /// `OutOfHostMemory` on map failure, `TooManyObjects` when
    /// `allocations_remains` hits zero.
    ///
    /// # Safety
    /// `device`, `heap` and `allocations_remains` must be the ones this
    /// allocator is used with throughout its lifetime.
    #[cfg_attr(feature = "tracing", tracing::instrument(skip(self, device)))]
    pub unsafe fn alloc(
        &mut self,
        device: &impl MemoryDevice<M>,
        size: u64,
        align_mask: u64,
        flags: AllocationFlags,
        heap: &mut Heap,
        allocations_remains: &mut u32,
    ) -> Result<BuddyBlock<M>, AllocationError> {
        let align_mask = align_mask | self.atom_mask;

        // Round the request up to an aligned power of two; overflow at
        // either step means the request cannot be satisfied.
        let size = align_up(size, align_mask)
            .and_then(|size| size.checked_next_power_of_two())
            .ok_or(AllocationError::OutOfDeviceMemory)?;

        let size = size.max(self.minimal_size);

        // Size class index: log2(size / minimal_size).
        let size_index = size.trailing_zeros() - self.minimal_size.trailing_zeros();
        let size_index =
            usize::try_from(size_index).map_err(|_| AllocationError::OutOfDeviceMemory)?;

        // Grow the list of size classes on demand.
        while self.sizes.len() <= size_index {
            self.sizes.push(Size::new());
        }

        let host_visible = self.host_visible();

        let mut candidate_size_index = size_index;

        // Find a free block at the requested size or any larger class.
        let (mut entry, entry_size_index) = loop {
            let sizes_len = self.sizes.len();

            let candidate_size_entry = &mut self.sizes[candidate_size_index];
            let candidate_size = self.minimal_size << candidate_size_index;

            if let Some(entry) = candidate_size_entry.acquire(candidate_size) {
                break (entry, candidate_size_index);
            }

            if sizes_len == candidate_size_index + 1 {
                // That's size of device allocation.
                if *allocations_remains == 0 {
                    return Err(AllocationError::TooManyObjects);
                }

                // A chunk is one pair of the largest class, i.e. twice
                // the largest block size.
                let chunk_size = self.minimal_size << (candidate_size_index + 1);
                let mut memory = device.allocate_memory(chunk_size, self.memory_type, flags)?;
                *allocations_remains -= 1;
                heap.alloc(chunk_size);

                // Persistently map host-visible chunks up front.
                let ptr = if host_visible {
                    match device.map_memory(&mut memory, 0, chunk_size) {
                        Ok(ptr) => Some(ptr),
                        Err(DeviceMapError::OutOfDeviceMemory) => {
                            return Err(AllocationError::OutOfDeviceMemory)
                        }
                        Err(DeviceMapError::MapFailed) | Err(DeviceMapError::OutOfHostMemory) => {
                            return Err(AllocationError::OutOfHostMemory)
                        }
                    }
                } else {
                    None
                };

                let chunk = self.chunks.insert(Chunk {
                    memory: Arc::new(memory),
                    ptr,
                    size: chunk_size,
                });

                // New top-level pair spans the whole chunk (offset 0,
                // no parent).
                let entry = candidate_size_entry.add_pair_and_acquire_left(chunk, 0, None);

                break (entry, candidate_size_index);
            }

            candidate_size_index += 1;
        };

        // Split the acquired block back down to the requested size,
        // taking the left half at each level and recording the parent
        // slot for later coalescing.
        for size_index in (size_index..entry_size_index).rev() {
            let size_entry = &mut self.sizes[size_index];
            entry =
                size_entry.add_pair_and_acquire_left(entry.chunk, entry.offset, Some(entry.index));
        }

        let chunk_entry = self.chunks.get_unchecked(entry.chunk);

        debug_assert!(
            entry
                .offset
                .checked_add(size)
                .map_or(false, |end| end <= chunk_entry.size),
            "Offset + size is not in chunk bounds"
        );

        Ok(BuddyBlock {
            memory: chunk_entry.memory.clone(),
            // Block pointer is the chunk's mapping advanced by the
            // block's offset.
            ptr: chunk_entry
                .ptr
                .map(|ptr| NonNull::new_unchecked(ptr.as_ptr().add(entry.offset as usize))),
            offset: entry.offset,
            size,
            chunk: entry.chunk,
            index: entry.index,
        })
    }
    /// Returns `block` to the allocator, coalescing freed buddy pairs
    /// upward and releasing the whole chunk back to `device` when it
    /// becomes empty.
    ///
    /// # Safety
    /// `block` must have been allocated by this allocator, and
    /// `device`/`heap`/`allocations_remains` must match the ones used
    /// at allocation time.
    #[cfg_attr(feature = "tracing", tracing::instrument(skip(self, device)))]
    pub unsafe fn dealloc(
        &mut self,
        device: &impl MemoryDevice<M>,
        block: BuddyBlock<M>,
        heap: &mut Heap,
        allocations_remains: &mut u32,
    ) {
        debug_assert!(block.size.is_power_of_two());

        // Size class the block was allocated from.
        let size_index =
            (block.size.trailing_zeros() - self.minimal_size.trailing_zeros()) as usize;

        let mut release_index = block.index;
        let mut release_size_index = size_index;

        // Walk up the buddy tree while releases keep merging pairs.
        loop {
            match self.sizes[release_size_index].release(release_index) {
                Release::Parent(parent) => {
                    release_size_index += 1;
                    release_index = parent;
                }
                Release::Chunk(chunk) => {
                    debug_assert_eq!(chunk, block.chunk);
                    debug_assert_eq!(
                        self.chunks.get(chunk).size,
                        self.minimal_size << (release_size_index + 1)
                    );
                    let chunk = self.chunks.remove(chunk);
                    // Drop the block's `Arc` clone so the chunk holds
                    // the last reference before unwrapping.
                    drop(block);

                    let memory = try_arc_unwrap(chunk.memory)
                        .expect("Memory shared after last block deallocated");

                    device.deallocate_memory(memory);
                    *allocations_remains += 1;
                    heap.dealloc(chunk.size);

                    return;
                }
                // Buddy still allocated; no further coalescing.
                Release::None => return,
            }
        }
    }
    /// Whether this memory type is host-visible (chunks get mapped on
    /// creation in `alloc`).
    fn host_visible(&self) -> bool {
        self.props.contains(MemoryPropertyFlags::HOST_VISIBLE)
    }