#![feature(
    box_vec_non_null,
    non_null_from_ref,
    pointer_is_aligned_to,
    ptr_as_ref_unchecked
)]
#![no_std]

#[cfg(test)]
mod tests;

extern crate alloc;

use ab_contracts_io_type::MAX_ALIGNMENT;
use alloc::boxed::Box;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use core::slice;
use core::sync::atomic::{AtomicU32, Ordering};

const _: () = {
    assert!(
        align_of::<u128>() == size_of::<u128>(),
        "Size and alignment are both 16 bytes"
    );
    assert!(
        align_of::<u128>() == MAX_ALIGNMENT as usize,
        "Alignment of u128 is max alignment"
    );
    assert!(size_of::<u128>() >= size_of::<AtomicU32>());
    assert!(align_of::<u128>() >= align_of::<AtomicU32>());
};

#[repr(C, align(16))]
struct ConstInnerBuffer {
    strong_count: AtomicU32,
}

const _: () = {
    assert!(align_of::<ConstInnerBuffer>() == align_of::<u128>());
    assert!(size_of::<ConstInnerBuffer>() == size_of::<u128>());
};

/// Statically allocated empty buffer. Its strong count starts at 1 and, since the static itself is
/// never dropped, it never reaches 0, so the buffer is never deallocated.
static EMPTY_SHARED_ALIGNED_BUFFER: SharedAlignedBuffer = SharedAlignedBuffer {
    inner: InnerBuffer {
        buffer: NonNull::from_ref({
            static BUFFER: MaybeUninit<ConstInnerBuffer> = MaybeUninit::new(ConstInnerBuffer {
                strong_count: AtomicU32::new(1),
            });

            &BUFFER
        })
        .cast::<MaybeUninit<u128>>(),
        capacity: 0,
        len: 0,
    },
};

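/// Heap-allocated, reference-counted backing storage shared by [`OwnedAlignedBuffer`] and
/// [`SharedAlignedBuffer`].
///
/// The allocation is a `[MaybeUninit<u128>]` slice: element `0` holds the strong count (an
/// [`AtomicU32`] in its first bytes) and the payload starts at element `1`, which keeps it aligned
/// to 16 bytes. `capacity` and `len` are payload byte counts.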
#[derive(Debug)]
struct InnerBuffer {
    buffer: NonNull<MaybeUninit<u128>>,
    /// Capacity of the payload in bytes
    capacity: u32,
    /// Number of initialized payload bytes
    len: u32,
}

// SAFETY: Heap-allocated buffer behind a reference-counted handle; the strong count is atomic and
// mutation only happens through unique ownership in `OwnedAlignedBuffer`
unsafe impl Send for InnerBuffer {}
// SAFETY: Heap-allocated buffer behind a reference-counted handle; the strong count is atomic and
// mutation only happens through unique ownership in `OwnedAlignedBuffer`
unsafe impl Sync for InnerBuffer {}

impl Default for InnerBuffer {
    #[inline(always)]
    fn default() -> Self {
        // Cloning the statically allocated empty buffer just bumps its strong count
        EMPTY_SHARED_ALIGNED_BUFFER.inner.clone()
    }
}

impl Clone for InnerBuffer {
    #[inline(always)]
    fn clone(&self) -> Self {
        // The allocation is shared with the clone, only the strong count is bumped
        self.strong_count_ref().fetch_add(1, Ordering::AcqRel);

        Self {
            buffer: self.buffer,
            capacity: self.capacity,
            len: self.len,
        }
    }
}

impl Drop for InnerBuffer {
    #[inline(always)]
    fn drop(&mut self) {
        if self.strong_count_ref().fetch_sub(1, Ordering::AcqRel) == 1 {
            // SAFETY: This was the last reference; the allocation was created from `Box` in
            // `allocate()` with exactly this slice length (the static empty buffer never gets
            // here because the static itself is never dropped)
            let _ = unsafe {
                Box::from_non_null(NonNull::slice_from_raw_parts(
                    self.buffer,
                    1 + (self.capacity as usize).div_ceil(size_of::<u128>()),
                ))
            };
        }
    }
}

impl InnerBuffer {
    /// Allocates a buffer for `capacity` payload bytes plus one `u128` header element and
    /// initializes the strong count (stored in the header) to 1.
    ///
    /// `capacity` is specified in bytes.
    #[inline(always)]
    fn allocate(capacity: u32) -> Self {
        let buffer = Box::into_non_null(Box::<[u128]>::new_uninit_slice(
            1 + (capacity as usize).div_ceil(size_of::<u128>()),
        ));
        // SAFETY: The first element is allocated and suitably sized/aligned for `AtomicU32`
        unsafe { buffer.cast::<AtomicU32>().write(AtomicU32::new(1)) };
        Self {
            buffer: buffer.cast::<MaybeUninit<u128>>(),
            capacity,
            len: 0,
        }
    }

    #[inline(always)]
    fn len(&self) -> u32 {
        self.len
    }

    /// # Safety
    /// The first `len` payload bytes must be initialized and `len` must not exceed capacity
    #[inline(always)]
    unsafe fn set_len(&mut self, len: u32) {
        self.len = len;
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    fn strong_count_ref(&self) -> &AtomicU32 {
        // SAFETY: The first bytes of the allocation always hold an initialized `AtomicU32`,
        // written either in `allocate()` or in the statically allocated empty buffer
        unsafe { self.buffer.as_ptr().cast::<AtomicU32>().as_ref_unchecked() }
    }

    #[inline(always)]
    fn as_slice(&self) -> &[u8] {
        let len = self.len() as usize;
        // SAFETY: Not null, and having `len` initialized bytes is an invariant of this
        // implementation
        unsafe { slice::from_raw_parts(self.as_ptr(), len) }
    }

    #[inline(always)]
    fn as_mut_slice(&mut self) -> &mut [u8] {
        let len = self.len() as usize;
        // SAFETY: Not null, and having `len` initialized bytes is an invariant of this
        // implementation
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
    }

    #[inline(always)]
    fn as_ptr(&self) -> *const u8 {
        // SAFETY: Constructors always allocate the header element, so skipping it stays in bounds
        unsafe { self.buffer.as_ptr().cast_const().add(1).cast::<u8>() }
    }

    #[inline(always)]
    fn as_mut_ptr(&mut self) -> *mut u8 {
        // SAFETY: Constructors always allocate the header element, so skipping it stays in bounds
        unsafe { self.buffer.as_ptr().add(1).cast::<u8>() }
    }
}

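/// Owned aligned buffer with a uniquely held allocation.
///
/// The payload is aligned to 16 bytes (`MAX_ALIGNMENT`); lengths and capacities are byte counts
/// limited to `u32`. Use [`Self::into_shared()`] for a cheaply cloneable, read-only version that
/// reuses the same allocation.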
#[derive(Debug)]
pub struct OwnedAlignedBuffer {
    inner: InnerBuffer,
}

impl Clone for OwnedAlignedBuffer {
    #[inline(always)]
    fn clone(&self) -> Self {
        // An owned buffer is always a deep copy into a fresh allocation
        let mut new_instance = Self::with_capacity(self.capacity());
        new_instance.copy_from_slice(self.as_slice());
        new_instance
    }
}

impl OwnedAlignedBuffer {
    /// Creates a new instance with the specified capacity (in bytes).
    ///
    /// The underlying allocation is rounded up to a whole number of 16-byte elements.
    #[inline(always)]
    pub fn with_capacity(capacity: u32) -> Self {
        Self {
            inner: InnerBuffer::allocate(capacity),
        }
    }

    /// Creates a new instance containing a copy of the provided bytes.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`.
    #[inline(always)]
    pub fn from_bytes(bytes: &[u8]) -> Self {
        let mut instance = Self::with_capacity(0);
        instance.copy_from_slice(bytes);
        instance
    }

    #[inline(always)]
    pub fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }

    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.inner.as_mut_slice()
    }

    #[inline(always)]
    pub fn as_ptr(&self) -> *const u8 {
        self.inner.as_ptr()
    }

    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.inner.as_mut_ptr()
    }

    /// Converts into a [`SharedAlignedBuffer`], reusing the allocation without copying.
    #[inline(always)]
    pub fn into_shared(self) -> SharedAlignedBuffer {
        SharedAlignedBuffer { inner: self.inner }
    }

    /// Ensures that the buffer has capacity for at least `capacity` bytes.
    ///
    /// Re-allocates and copies the existing contents if the current capacity is not sufficient.
    #[inline(always)]
    pub fn ensure_capacity(&mut self, capacity: u32) {
        if capacity > self.capacity() {
            // Copying into a fresh buffer preserves both contents and length
            let mut new_buffer = Self::with_capacity(capacity);
            new_buffer.copy_from_slice(self.as_slice());
            *self = new_buffer;
        }
    }

    /// Replaces the contents of the buffer with the provided bytes.
    ///
    /// Re-allocates if the current capacity is not sufficient.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`.
    #[inline(always)]
    pub fn copy_from_slice(&mut self, bytes: &[u8]) {
        let Ok(len) = u32::try_from(bytes.len()) else {
            panic!("Too many bytes {}", bytes.len());
        };

        if len > self.capacity() {
            // Existing contents are about to be overwritten, so there is no need to copy them
            self.inner = InnerBuffer::allocate(len);
        }

        // SAFETY: Sufficient capacity is guaranteed above, source and destination do not overlap,
        // and exactly `len` bytes are initialized before the length is set
        unsafe {
            self.as_mut_ptr()
                .copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());

            self.inner.set_len(len);
        }
    }

    /// Appends the provided bytes to the end of the buffer.
    ///
    /// Re-allocates if the current capacity is not sufficient.
    ///
    /// Returns `false` if `bytes.len()` or the resulting length does not fit into `u32`.
    #[inline(always)]
    #[must_use]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let Ok(len) = u32::try_from(bytes.len()) else {
            return false;
        };

        let Some(new_len) = self.len().checked_add(len) else {
            return false;
        };

        if new_len > self.capacity() {
            // Grow while preserving the existing contents; replacing the inner buffer directly
            // would discard them and reset the length
            self.ensure_capacity(new_len.max(self.capacity() * 2));
        }

        // SAFETY: Sufficient capacity is guaranteed above, source and destination do not overlap,
        // and all bytes up to `new_len` are initialized before the length is set
        unsafe {
            self.as_mut_ptr()
                .add(self.len() as usize)
                .copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());

            self.inner.set_len(new_len);
        }

        true
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    #[inline(always)]
    pub fn len(&self) -> u32 {
        self.inner.len()
    }

    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.inner.capacity()
    }

    /// Sets the length of the buffer to `new_len` bytes.
    ///
    /// # Panics
    /// If `new_len` exceeds [`Self::capacity()`].
    ///
    /// # Safety
    /// The caller must ensure that the first `new_len` bytes of the buffer are initialized.
    #[inline(always)]
    pub unsafe fn set_len(&mut self, new_len: u32) {
        assert!(
            new_len <= self.capacity(),
            "Too many bytes {} > {}",
            new_len,
            self.capacity()
        );
        // SAFETY: Checked against capacity above; the caller guarantees the bytes are initialized
        unsafe {
            self.inner.set_len(new_len);
        }
    }
}

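/// Cheaply cloneable, reference-counted counterpart of [`OwnedAlignedBuffer`].
///
/// Clones share the same allocation and only bump an atomic strong count; the contents are
/// read-only. Use [`Self::into_owned()`] to get a mutable [`OwnedAlignedBuffer`] back, which is
/// allocation-free when this is the only reference.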
#[derive(Debug, Default, Clone)]
pub struct SharedAlignedBuffer {
    inner: InnerBuffer,
}

// SAFETY: Read-only handle to a reference-counted buffer with an atomic strong count
unsafe impl Send for SharedAlignedBuffer {}
// SAFETY: Read-only handle to a reference-counted buffer with an atomic strong count
unsafe impl Sync for SharedAlignedBuffer {}

impl SharedAlignedBuffer {
    /// Static reference to an empty buffer.
    #[inline(always)]
    pub fn empty_ref() -> &'static Self {
        &EMPTY_SHARED_ALIGNED_BUFFER
    }

    /// Creates a new instance containing a copy of the provided bytes.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`.
    #[inline(always)]
    pub fn from_bytes(bytes: &[u8]) -> Self {
        OwnedAlignedBuffer::from_bytes(bytes).into_shared()
    }

    /// Converts back into an [`OwnedAlignedBuffer`].
    ///
    /// Reuses the allocation if this is the only reference, otherwise copies the contents into a
    /// new allocation.
    #[inline(always)]
    pub fn into_owned(self) -> OwnedAlignedBuffer {
        if self.inner.strong_count_ref().load(Ordering::Acquire) == 1 {
            OwnedAlignedBuffer { inner: self.inner }
        } else {
            OwnedAlignedBuffer::from_bytes(self.as_slice())
        }
    }

    #[inline(always)]
    pub fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }

    #[inline(always)]
    pub fn as_ptr(&self) -> *const u8 {
        self.inner.as_ptr()
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    #[inline(always)]
    pub fn len(&self) -> u32 {
        self.inner.len()
    }
}
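
// A minimal usage sketch; the module name `usage_example` is illustrative and separate from the
// crate's real test suite in `tests.rs`. It builds an owned buffer, appends bytes, and converts
// it into a shared buffer and back without copying.
#[cfg(test)]
mod usage_example {
    use super::*;

    #[test]
    fn owned_to_shared_round_trip() {
        let mut owned = OwnedAlignedBuffer::with_capacity(8);
        assert!(owned.append(&[1, 2, 3]));
        assert_eq!(owned.as_slice(), &[1, 2, 3]);
        // The payload is 16-byte aligned because a single `u128` header element precedes it
        assert!(owned.as_ptr().is_aligned_to(16));

        // Converting to a shared buffer reuses the allocation
        let shared = owned.into_shared();
        assert_eq!(shared.as_slice(), &[1, 2, 3]);

        // With a single strong reference, converting back is allocation-free
        let owned_again = shared.into_owned();
        assert_eq!(owned_again.as_slice(), &[1, 2, 3]);
    }
}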