#![feature(
    const_block_items,
    box_vec_non_null,
    pointer_is_aligned_to,
    ptr_as_ref_unchecked
)]
#![no_std]

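//! Aligned byte buffers.
//!
//! This crate provides [`OwnedAlignedBuffer`], a growable byte buffer whose contents are aligned
//! to [`MAX_ALIGNMENT`] (16 bytes), and [`SharedAlignedBuffer`], an immutable, reference-counted
//! version of it that is cheap to clone.
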
#[cfg(test)]
mod tests;

extern crate alloc;

use ab_io_type::MAX_ALIGNMENT;
use alloc::alloc::realloc;
use alloc::boxed::Box;
use core::alloc::Layout;
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::slice;
use core::sync::atomic::{AtomicU32, Ordering};
use stable_deref_trait::{CloneStableDeref, StableDeref};
use yoke::CloneableCart;

const {
    assert!(
        align_of::<u128>() == size_of::<u128>(),
        "Size and alignment are both 16 bytes"
    );
    assert!(
        align_of::<u128>() == MAX_ALIGNMENT as usize,
        "Alignment of u128 is the max alignment"
    );
    assert!(size_of::<u128>() >= size_of::<AtomicU32>());
    assert!(align_of::<u128>() >= align_of::<AtomicU32>());
}

#[repr(C, align(16))]
struct ConstInnerBuffer {
    strong_count: AtomicU32,
}

const {
    assert!(align_of::<ConstInnerBuffer>() == align_of::<u128>());
    assert!(size_of::<ConstInnerBuffer>() == size_of::<u128>());
}

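/// Statically allocated empty buffer that `InnerBuffer::default()` and
/// `SharedAlignedBuffer::empty_ref()` point to, so creating an empty buffer never allocates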
static EMPTY_SHARED_ALIGNED_BUFFER: SharedAlignedBuffer = SharedAlignedBuffer {
    inner: InnerBuffer {
        buffer: NonNull::from_ref({
            static BUFFER: MaybeUninit<ConstInnerBuffer> = MaybeUninit::new(ConstInnerBuffer {
                strong_count: AtomicU32::new(1),
            });

            &BUFFER
        })
        .cast::<MaybeUninit<u128>>(),
        capacity: 0,
        len: 0,
    },
};

#[derive(Debug)]
struct InnerBuffer {
    buffer: NonNull<MaybeUninit<u128>>,
    capacity: u32,
    len: u32,
}

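// SAFETY: The buffer is only mutated through `&mut self` on a uniquely owned instance and the
// strong count is an atomic, so the type can be sent to and shared between threads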
unsafe impl Send for InnerBuffer {}
unsafe impl Sync for InnerBuffer {}

impl Default for InnerBuffer {
    #[inline(always)]
    fn default() -> Self {
        EMPTY_SHARED_ALIGNED_BUFFER.inner.clone()
    }
}

impl Clone for InnerBuffer {
    #[inline(always)]
    fn clone(&self) -> Self {
        self.strong_count_ref().fetch_add(1, Ordering::AcqRel);

        Self {
            buffer: self.buffer,
            capacity: self.capacity,
            len: self.len,
        }
    }
}

impl Drop for InnerBuffer {
    #[inline(always)]
    fn drop(&mut self) {
        if self.strong_count_ref().fetch_sub(1, Ordering::AcqRel) == 1 {
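            // SAFETY: This was the last reference, so the buffer is reconstructed into the boxed
            // slice it was allocated as (same layout) and deallocated here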
            let _ = unsafe {
                Box::from_non_null(NonNull::slice_from_raw_parts(
                    self.buffer,
                    1 + (self.capacity as usize).div_ceil(size_of::<u128>()),
                ))
            };
        }
    }
}

impl InnerBuffer {
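    /// Allocates a new buffer that can store up to `capacity` bytes.
    ///
    /// One extra `u128` is allocated at the start of the buffer and holds the atomic strong count
    /// used for reference counting.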
    #[inline(always)]
    fn allocate(capacity: u32) -> Self {
        let buffer = Box::into_non_null(Box::<[u128]>::new_uninit_slice(
            1 + (capacity as usize).div_ceil(size_of::<u128>()),
        ));
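        // SAFETY: The freshly allocated buffer is valid for writes and aligned, its first `u128`
        // is used to store the strong count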
        unsafe { buffer.cast::<AtomicU32>().write(AtomicU32::new(1)) };
        Self {
            buffer: buffer.cast::<MaybeUninit<u128>>(),
            capacity,
            len: 0,
        }
    }

    #[inline(always)]
    fn resize(&mut self, capacity: u32) {
        let layout = Layout::for_value(unsafe {
            slice::from_raw_parts(
                self.buffer.as_ptr(),
                1 + (self.capacity as usize).div_ceil(size_of::<u128>()),
            )
        });

        let new_size = size_of::<u128>() + (capacity as usize).next_multiple_of(layout.align());

        let new_ptr = unsafe {
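            // SAFETY: The pointer was allocated by the global allocator with the layout computed
            // above, and the new size is non-zero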
            realloc(self.buffer.as_ptr().cast::<u8>(), layout, new_size)
                .cast::<MaybeUninit<u128>>()
        };
        let Some(new_ptr) = NonNull::new(new_ptr) else {
            panic!("Realloc from {} to {new_size} has failed", self.capacity());
        };

        self.buffer = new_ptr;
        self.capacity = capacity;
    }

    #[inline(always)]
    const fn len(&self) -> u32 {
        self.len
    }

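    /// Sets the length of the stored data to `len` bytes.
    ///
    /// # Safety
    /// The first `len` bytes of the buffer must be initialized, and `len` must not exceed the
    /// capacity.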
    #[inline(always)]
    unsafe fn set_len(&mut self, len: u32) {
        debug_assert!(
            len <= self.capacity(),
            "Too many bytes {} > {}",
            len,
            self.capacity()
        );
        self.len = len;
    }

    #[inline(always)]
    const fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    const fn strong_count_ref(&self) -> &AtomicU32 {
        unsafe { self.buffer.as_ptr().cast::<AtomicU32>().as_ref_unchecked() }
    }

    #[inline(always)]
    const fn as_slice(&self) -> &[u8] {
        let len = self.len() as usize;
        unsafe { slice::from_raw_parts(self.as_ptr(), len) }
    }

    #[inline(always)]
    const fn as_mut_slice(&mut self) -> &mut [u8] {
        let len = self.len() as usize;
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
    }

    #[inline(always)]
    const fn as_ptr(&self) -> *const u8 {
        unsafe { self.buffer.as_ptr().cast_const().add(1).cast::<u8>() }
    }

    #[inline(always)]
    const fn as_mut_ptr(&mut self) -> *mut u8 {
        unsafe { self.buffer.as_ptr().add(1).cast::<u8>() }
    }
}

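/// An owned, mutable buffer of bytes aligned to 16 bytes ([`MAX_ALIGNMENT`]).
///
/// The buffer can be grown, written to and cheaply converted into a [`SharedAlignedBuffer`].
///
/// Illustrative usage (not compiled as a doctest):
/// ```rust,ignore
/// let mut buffer = OwnedAlignedBuffer::with_capacity(0);
/// buffer.copy_from_slice(b"hello");
/// assert_eq!(buffer.as_slice(), b"hello");
/// let shared = buffer.into_shared();
/// ```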
#[derive(Debug)]
pub struct OwnedAlignedBuffer {
    inner: InnerBuffer,
}

impl Deref for OwnedAlignedBuffer {
    type Target = [u8];

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl DerefMut for OwnedAlignedBuffer {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

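// SAFETY: The heap allocation does not move when the buffer itself is moved, so the `Deref`
// target address is stable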
unsafe impl StableDeref for OwnedAlignedBuffer {}

impl Clone for OwnedAlignedBuffer {
    #[inline(always)]
    fn clone(&self) -> Self {
        let mut new_instance = Self::with_capacity(self.capacity());
        new_instance.copy_from_slice(self.as_slice());
        new_instance
    }
}

impl OwnedAlignedBuffer {
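    /// Creates a new buffer with the specified capacity (in bytes) and a length of zero.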
    #[inline(always)]
    pub fn with_capacity(capacity: u32) -> Self {
        Self {
            inner: InnerBuffer::allocate(capacity),
        }
    }

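    /// Creates a new buffer initialized with the provided bytes.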
    #[inline(always)]
    pub fn from_bytes(bytes: &[u8]) -> Self {
        let mut instance = Self::with_capacity(0);
        instance.copy_from_slice(bytes);
        instance
    }

    #[inline(always)]
    pub const fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }

    #[inline(always)]
    pub const fn as_mut_slice(&mut self) -> &mut [u8] {
        self.inner.as_mut_slice()
    }

    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        self.inner.as_ptr()
    }

    #[inline(always)]
    pub const fn as_mut_ptr(&mut self) -> *mut u8 {
        self.inner.as_mut_ptr()
    }

    #[inline(always)]
    pub fn into_shared(self) -> SharedAlignedBuffer {
        SharedAlignedBuffer { inner: self.inner }
    }

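    /// Ensures that the buffer can hold at least `capacity` bytes, growing it if necessary.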
    #[inline(always)]
    pub fn ensure_capacity(&mut self, capacity: u32) {
        if capacity > self.capacity() {
            self.inner.resize(capacity)
        }
    }

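    /// Replaces the contents of the buffer with the provided bytes, growing it if necessary.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`.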
    #[inline(always)]
    pub fn copy_from_slice(&mut self, bytes: &[u8]) {
        let Ok(len) = u32::try_from(bytes.len()) else {
            panic!("Too many bytes {}", bytes.len());
        };

        if len > self.capacity() {
            self.inner
                .resize(len.max(self.capacity().saturating_mul(2)));
        }

        unsafe {
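            // SAFETY: Sufficient capacity was ensured above, source and destination do not
            // overlap, and exactly `len` bytes are initialized before the length is updated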
            self.as_mut_ptr()
                .copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());

            self.inner.set_len(len);
        }
    }

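    /// Appends the provided bytes to the end of the buffer, growing it if necessary.
    ///
    /// Returns `false` if the input is too large or the resulting length would not fit into
    /// `u32`.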
    #[inline(always)]
    #[must_use]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let Ok(len) = u32::try_from(bytes.len()) else {
            return false;
        };

        let Some(new_len) = self.len().checked_add(len) else {
            return false;
        };

        if new_len > self.capacity() {
            self.inner
                .resize(new_len.max(self.capacity().saturating_mul(2)));
        }

        unsafe {
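            // SAFETY: Sufficient capacity was ensured above, source and destination do not
            // overlap, and all bytes up to `new_len` are initialized before the length is updated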
            self.as_mut_ptr()
                .add(self.len() as usize)
                .copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());

            self.inner.set_len(new_len);
        }

        true
    }

    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    #[inline(always)]
    pub const fn len(&self) -> u32 {
        self.inner.len()
    }

    #[inline(always)]
    pub const fn capacity(&self) -> u32 {
        self.inner.capacity()
    }

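    /// Sets the length of the stored data to `new_len` bytes.
    ///
    /// # Safety
    /// The first `new_len` bytes of the buffer must be initialized, and `new_len` must not exceed
    /// the capacity.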
    #[inline(always)]
    pub unsafe fn set_len(&mut self, new_len: u32) {
        unsafe {
            self.inner.set_len(new_len);
        }
    }
}

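/// A shared, immutable buffer of bytes aligned to 16 bytes ([`MAX_ALIGNMENT`]).
///
/// Cloning only increments an atomic strong count; the underlying bytes are never copied. Use
/// [`SharedAlignedBuffer::into_owned()`] to turn it back into a mutable [`OwnedAlignedBuffer`].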
#[derive(Debug, Default, Clone)]
pub struct SharedAlignedBuffer {
    inner: InnerBuffer,
}

impl Deref for SharedAlignedBuffer {
    type Target = [u8];

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

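// SAFETY: The heap allocation does not move when the buffer or its clones are moved, and all
// clones point at the same allocation, so the `Deref` target is stable and shared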
unsafe impl StableDeref for SharedAlignedBuffer {}
unsafe impl CloneStableDeref for SharedAlignedBuffer {}
unsafe impl CloneableCart for SharedAlignedBuffer {}

impl SharedAlignedBuffer {
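    /// Returns a reference to a statically allocated empty buffer.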
    #[inline(always)]
    pub const fn empty_ref() -> &'static Self {
        &EMPTY_SHARED_ALIGNED_BUFFER
    }

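    /// Creates a new buffer initialized with the provided bytes.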
    #[inline(always)]
    pub fn from_bytes(bytes: &[u8]) -> Self {
        OwnedAlignedBuffer::from_bytes(bytes).into_shared()
    }

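    /// Converts the buffer back into an [`OwnedAlignedBuffer`].
    ///
    /// If this is the only reference to the underlying allocation, it is reused without copying;
    /// otherwise the contents are copied into a freshly allocated owned buffer.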
    #[inline(always)]
    pub fn into_owned(self) -> OwnedAlignedBuffer {
        if self.inner.strong_count_ref().load(Ordering::Acquire) == 1 {
            OwnedAlignedBuffer { inner: self.inner }
        } else {
            OwnedAlignedBuffer::from_bytes(self.as_slice())
        }
    }

    #[inline(always)]
    pub const fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }

    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        self.inner.as_ptr()
    }

    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    #[inline(always)]
    pub const fn len(&self) -> u32 {
        self.inner.len()
    }
}