use crate::opaque_pointee::opaque_pointee;
use std::alloc::{self, Layout};
use std::cell::UnsafeCell;
use std::marker::PhantomData;
use std::mem::{align_of, MaybeUninit};
use std::ptr::{self, NonNull};
use std::slice;
// Opaque C type standing in for the upb arena; only ever used behind a pointer.
opaque_pointee!(upb_Arena);
// A raw, non-null pointer to a C `upb_Arena`.
pub type RawArena = NonNull<upb_Arena>;

/// See `upb/port/def.inc`.
const UPB_MALLOC_ALIGN: usize = 8;
// Compile-time check: arena allocations must be at least pointer-aligned so
// that pointers may be stored in arena-allocated memory.
const _CHECK_UPB_MALLOC_ALIGN_AT_LEAST_POINTER_ALIGNED: () =
    assert!(UPB_MALLOC_ALIGN >= align_of::<*const ()>());
17 /// A wrapper over a `upb_Arena`.
19 /// This is not a safe wrapper per se, because the allocation functions still
20 /// have sharp edges (see their safety docs for more info).
22 /// This is an owning type and will automatically free the arena when
25 /// Note that this type is not `Sync` as it implements unsynchronized interior
26 /// mutability. The upb_Arena C object could be understood as being Sync (at
27 /// least vacuously under current API since there are not any const upb_Arena*
28 /// API functions), but the Rust Arena is necessarily expressed as interior
29 /// mutability (&self rather than &mut self receivers) See https://doc.rust-lang.org/nomicon/lifetime-mismatch.html and
30 /// https://blog.reverberate.org/2021/12/19/arenas-and-rust.html, and the
31 /// 'known problems' section of https://rust-lang.github.io/rust-clippy/master/index.html#/mut_from_ref.
34 // Safety invariant: this must always be a valid arena
36 _not_sync: PhantomData<UnsafeCell<()>>,
39 // SAFETY: `Arena` uniquely holds the underlying RawArena and has no
41 unsafe impl Send for Arena {}
44 /// Allocates a fresh arena.
46 pub fn new() -> Self {
49 fn arena_new_failed() -> ! {
50 panic!("Could not create a new UPB arena");
54 // - `upb_Arena_New` is assumed to be implemented correctly and always sound to
55 // call; if it returned a non-null pointer, it is a valid arena.
57 let Some(raw) = upb_Arena_New() else { arena_new_failed() };
58 Self { raw, _not_sync: PhantomData }
63 /// - The `raw_arena` must point to a valid arena.
64 /// - The caller must ensure that the Arena's destructor does not run.
65 pub unsafe fn from_raw(raw_arena: RawArena) -> Self {
66 Arena { raw: raw_arena, _not_sync: PhantomData }
69 /// Returns the raw, UPB-managed pointer to the arena.
71 pub fn raw(&self) -> RawArena {
75 /// Allocates some memory on the arena.
79 /// - `layout`'s alignment must be less than `UPB_MALLOC_ALIGN`.
80 #[allow(clippy::mut_from_ref)]
82 pub unsafe fn alloc(&self, layout: Layout) -> &mut [MaybeUninit<u8>] {
83 debug_assert!(layout.align() <= UPB_MALLOC_ALIGN);
84 // SAFETY: `self.raw` is a valid UPB arena
85 let ptr = unsafe { upb_Arena_Malloc(self.raw, layout.size()) };
87 alloc::handle_alloc_error(layout);
91 // - `upb_Arena_Malloc` promises that if the return pointer is non-null, it is
92 // dereferencable for `size` bytes and has an alignment of `UPB_MALLOC_ALIGN`
93 // until the arena is destroyed.
94 // - `[MaybeUninit<u8>]` has no alignment requirement, and `ptr` is aligned to a
95 // `UPB_MALLOC_ALIGN` boundary.
96 unsafe { slice::from_raw_parts_mut(ptr.cast(), layout.size()) }
99 /// Same as alloc() but panics if `layout.align() > UPB_MALLOC_ALIGN`.
100 #[allow(clippy::mut_from_ref)]
102 pub fn checked_alloc(&self, layout: Layout) -> &mut [MaybeUninit<u8>] {
103 assert!(layout.align() <= UPB_MALLOC_ALIGN);
104 // SAFETY: layout.align() <= UPB_MALLOC_ALIGN asserted.
105 unsafe { self.alloc(layout) }
108 /// Copies the T into this arena and returns a pointer to the T data inside
110 pub fn copy_in<'a, T: Copy>(&'a self, data: &T) -> &'a T {
111 let layout = Layout::for_value(data);
112 let alloc = self.checked_alloc(layout);
115 // - alloc is valid for `layout.len()` bytes and is the uninit bytes are written
116 // to not read from until written.
117 // - T is copy so copying the bytes of the value is sound.
119 let alloc = alloc.as_mut_ptr().cast::<MaybeUninit<T>>();
120 // let data = (data as *const T).cast::<MaybeUninit<T>>();
121 (*alloc).write(*data)
125 pub fn copy_str_in<'a>(&'a self, s: &str) -> &'a str {
126 let copied_bytes = self.copy_slice_in(s.as_bytes());
127 // SAFETY: `copied_bytes` has same contents as `s` and so must meet &str
129 unsafe { std::str::from_utf8_unchecked(copied_bytes) }
132 pub fn copy_slice_in<'a, T: Copy>(&'a self, data: &[T]) -> &'a [T] {
133 let layout = Layout::for_value(data);
134 let alloc: *mut T = self.checked_alloc(layout).as_mut_ptr().cast();
137 // - uninit_alloc is valid for `layout.len()` bytes and is the uninit bytes are
138 // written to not read from until written.
139 // - T is copy so copying the bytes of the values is sound.
141 ptr::copy_nonoverlapping(data.as_ptr(), alloc, data.len());
142 slice::from_raw_parts_mut(alloc, data.len())
147 impl Default for Arena {
148 fn default() -> Self {
153 impl Drop for Arena {
157 upb_Arena_Free(self.raw);
163 // `Option<NonNull<T: Sized>>` is ABI-compatible with `*mut T`
164 fn upb_Arena_New() -> Option<RawArena>;
165 fn upb_Arena_Free(arena: RawArena);
166 fn upb_Arena_Malloc(arena: RawArena, size: usize) -> *mut u8;
175 // SAFETY: FFI unit test uses C API under expected patterns.
177 let arena = upb_Arena_New().unwrap();
178 let bytes = upb_Arena_Malloc(arena, 3);
180 upb_Arena_Free(arena);
185 fn test_arena_new_and_free() {
186 let arena = Arena::new();