ab_contracts_slots/aligned_buffer.rs

#[cfg(test)]
mod tests;

use ab_contracts_io_type::MAX_ALIGNMENT;
use alloc::boxed::Box;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use core::sync::atomic::{AtomicU32, Ordering};
use core::{mem, slice};

#[repr(C, align(16))]
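/// A memory chunk whose size and alignment are both [`MAX_ALIGNMENT`] (16) bytes, used as the
/// unit of allocation for aligned buffers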
struct AlignedBytes([u8; MAX_ALIGNMENT as usize]);

const _: () = {
    assert!(
        align_of::<AlignedBytes>() == size_of::<AlignedBytes>(),
        "Size and alignment are both 16 bytes"
    );
};

#[repr(C, align(16))]
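/// Header of the statically allocated empty buffer, occupying exactly one [`AlignedBytes`] chunk
/// and storing only the strong reference count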
struct ConstInnerBuffer {
    strong_count: AtomicU32,
}

const _: () = {
    assert!(align_of::<ConstInnerBuffer>() == align_of::<AlignedBytes>());
    assert!(size_of::<ConstInnerBuffer>() == size_of::<AlignedBytes>());
};

static mut CONST_INNER_BUFFER: &mut [ConstInnerBuffer] = &mut [ConstInnerBuffer {
    strong_count: AtomicU32::new(1),
}];

/// Newtype wrapper that allows the pointer to the static buffer above to be stored in a `static`
struct ConstInnerBufferPtr(NonNull<[MaybeUninit<AlignedBytes>]>);
// SAFETY: Points to statically allocated memory whose only mutation happens through the atomic
// `strong_count`
unsafe impl Sync for ConstInnerBufferPtr {}

static EMPTY_SHARED_ALIGNED_BUFFER_BUFFER: ConstInnerBufferPtr = unsafe {
    // SAFETY: `ConstInnerBuffer` matches the size and alignment of `AlignedBytes` (asserted
    // above)
    mem::transmute::<NonNull<[ConstInnerBuffer]>, ConstInnerBufferPtr>(NonNull::from_mut(
        CONST_INNER_BUFFER,
    ))
};
static EMPTY_SHARED_ALIGNED_BUFFER: SharedAlignedBuffer = SharedAlignedBuffer {
    inner: InnerBuffer {
        buffer: EMPTY_SHARED_ALIGNED_BUFFER_BUFFER.0,
        len: 0,
    },
};

#[derive(Debug)]
#[repr(C)]
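/// Storage shared by [`OwnedAlignedBuffer`] and [`SharedAlignedBuffer`]: the first
/// [`AlignedBytes`] chunk holds the atomic strong count, the remaining chunks hold the data, and
/// `len` is the number of initialized data bytes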
struct InnerBuffer {
    buffer: NonNull<[MaybeUninit<AlignedBytes>]>,
    len: u32,
}

// SAFETY: The buffer is reference-counted with an atomic and mutation of the contents requires
// `&mut self`
unsafe impl Send for InnerBuffer {}
// SAFETY: Shared access only allows reads of the contents and atomic updates of the strong count
unsafe impl Sync for InnerBuffer {}

impl Default for InnerBuffer {
    #[inline(always)]
    fn default() -> Self {
        let buffer = InnerBuffer {
            buffer: EMPTY_SHARED_ALIGNED_BUFFER.inner.buffer,
            len: 0,
        };
        buffer.strong_count_ref().fetch_add(1, Ordering::AcqRel);
        buffer
    }
}

impl Clone for InnerBuffer {
    #[inline(always)]
    fn clone(&self) -> Self {
        self.strong_count_ref().fetch_add(1, Ordering::AcqRel);

        Self {
            buffer: self.buffer,
            len: self.len,
        }
    }
}

impl Drop for InnerBuffer {
    #[inline(always)]
    fn drop(&mut self) {
        if self.strong_count_ref().fetch_sub(1, Ordering::AcqRel) == 1 {
            // SAFETY: The last reference is being dropped, so the allocation can be freed
            let _ = unsafe { Box::from_non_null(self.buffer) };
        }
    }
}

impl InnerBuffer {
    #[inline(always)]
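    /// Allocates a new buffer: one [`AlignedBytes`] chunk for the strong count followed by enough
    /// chunks to hold `capacity` bytes; the strong count starts at 1 and `len` at 0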
    fn allocate(capacity: u32) -> Self {
        let buffer = Box::into_non_null(Box::new_uninit_slice(
            1 + (capacity as usize).div_ceil(size_of::<AlignedBytes>()),
        ));
        let mut instance = Self { buffer, len: 0 };
        unsafe {
            // SAFETY: Freshly allocated buffer is not shared with anyone else yet
            instance.len_write(0);
            instance.strong_count_initialize();
        }
        instance
    }

    #[inline(always)]
    fn len_read(&self) -> u32 {
        self.len
    }

    #[inline(always)]
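    /// # Safety
    /// Caller must ensure `len` does not exceed the capacity of the buffer and that the first
    /// `len` bytes of the data region are initialized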
    unsafe fn len_write(&mut self, len: u32) {
        self.len = len;
    }

    #[inline(always)]
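    /// Capacity of the data region in bytes (excludes the leading strong count chunk)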
    fn capacity(&self) -> u32 {
        ((self.buffer.len() - 1) * size_of::<AlignedBytes>()) as u32
    }

    #[inline(always)]
    fn strong_count_ref(&self) -> &AtomicU32 {
        // SAFETY: The first chunk of the buffer is always an initialized `AtomicU32` strong count
        unsafe { self.buffer.as_ptr().cast::<AtomicU32>().as_ref_unchecked() }
    }

    #[inline(always)]
    fn strong_count_initialize(&mut self) {
        // SAFETY: The first chunk is reserved for the strong count and is properly aligned for it
        unsafe {
            self.buffer
                .as_ptr()
                .cast::<AtomicU32>()
                .write(AtomicU32::new(1))
        };
    }

    #[inline(always)]
    fn as_slice(&self) -> &[u8] {
        let len = self.len_read() as usize;
        // SAFETY: The first `len` bytes of the data region are guaranteed to be initialized
        unsafe { slice::from_raw_parts(self.as_ptr(), len) }
    }

    #[inline(always)]
    fn as_mut_slice(&mut self) -> &mut [u8] {
        let len = self.len_read() as usize;
        // SAFETY: The first `len` bytes of the data region are guaranteed to be initialized
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
    }

    #[inline(always)]
    fn as_ptr(&self) -> *const u8 {
        let buffer_ptr = self.buffer.as_ptr().cast_const().cast::<u8>();
        // SAFETY: Skips the strong count chunk at the start of the allocation
        unsafe { buffer_ptr.add(size_of::<AlignedBytes>()) }
    }

    #[inline(always)]
    fn as_mut_ptr(&mut self) -> *mut u8 {
        let buffer_ptr = self.buffer.as_ptr().cast::<u8>();
        // SAFETY: Skips the strong count chunk at the start of the allocation
        unsafe { buffer_ptr.add(size_of::<AlignedBytes>()) }
    }
}

#[derive(Debug)]
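/// Owned aligned buffer with exclusive access to its contents that can be converted into
/// [`SharedAlignedBuffer`] for cheap cloning.
///
/// The contents are aligned to [`MAX_ALIGNMENT`] bytes and the length is limited to `u32`.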
pub struct OwnedAlignedBuffer {
    inner: InnerBuffer,
}

impl Clone for OwnedAlignedBuffer {
    #[inline(always)]
    fn clone(&self) -> Self {
        let mut new_instance = Self::with_capacity(self.capacity());
        new_instance.copy_from_slice(self.as_slice());
        new_instance
    }
}

impl OwnedAlignedBuffer {
    #[inline(always)]
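    /// Creates a new buffer with at least the specified capacity (in bytes) and zero length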
    pub fn with_capacity(capacity: u32) -> Self {
        Self {
            inner: InnerBuffer::allocate(capacity),
        }
    }

    #[inline(always)]
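    /// Creates a new buffer initialized with the provided bytes.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`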
    pub fn from_bytes(bytes: &[u8]) -> Self {
        let mut instance = Self::with_capacity(0);
        instance.copy_from_slice(bytes);
        instance
    }

    #[inline(always)]
    pub fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }

    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.inner.as_mut_slice()
    }

    #[inline(always)]
    pub fn as_ptr(&self) -> *const u8 {
        self.inner.as_ptr()
    }

    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.inner.as_mut_ptr()
    }

    #[inline(always)]
    pub fn into_shared(self) -> SharedAlignedBuffer {
        SharedAlignedBuffer { inner: self.inner }
    }

    #[inline(always)]
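    /// Ensures the buffer has capacity for at least `capacity` bytes, reallocating and copying
    /// the existing contents if necessary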
    pub fn ensure_capacity(&mut self, capacity: u32) {
        if capacity > self.capacity() {
            let mut new_buffer = Self::with_capacity(capacity);
            new_buffer.copy_from_slice(self.as_slice());
            *self = new_buffer;
        }
    }

    #[inline(always)]
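    /// Replaces the contents of the buffer with the provided bytes, allocating a larger buffer if
    /// the current capacity is insufficient.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`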
    pub fn copy_from_slice(&mut self, bytes: &[u8]) {
        let Ok(len) = u32::try_from(bytes.len()) else {
            panic!("Too many bytes");
        };

        if len > self.capacity() {
            self.inner = InnerBuffer::allocate(len);
        }

        // SAFETY: Capacity is guaranteed to be sufficient at this point, source and destination
        // do not overlap
        unsafe {
            self.as_mut_ptr()
                .copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());

            self.inner.len_write(len);
        }
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.inner.len_read() == 0
    }

    #[inline(always)]
    pub fn len(&self) -> u32 {
        self.inner.len_read()
    }

    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.inner.capacity()
    }

    #[inline(always)]
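    /// Sets the length of the buffer to `new_len` bytes.
    ///
    /// # Panics
    /// If `new_len` exceeds the capacity of the buffer.
    ///
    /// # Safety
    /// Caller must ensure that the first `new_len` bytes of the buffer are initialized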
    pub unsafe fn set_len(&mut self, new_len: u32) {
        assert!(
            new_len <= self.capacity(),
            "Too many bytes {} > {}",
            new_len,
            self.capacity()
        );
        // SAFETY: Checked above that `new_len` does not exceed capacity, the caller guarantees
        // that the bytes are initialized
        unsafe {
            self.inner.len_write(new_len);
        }
    }
}

#[derive(Debug, Default, Clone)]
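/// Shared aligned buffer that is cheap to clone: clones share the same underlying allocation and
/// only bump an atomic strong count.
///
/// The contents are immutable; to modify them, convert the buffer back with
/// [`SharedAlignedBuffer::into_owned()`], which copies the data only if other references to the
/// allocation still exist.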
pub struct SharedAlignedBuffer {
    inner: InnerBuffer,
}

// SAFETY: The underlying buffer is reference-counted with an atomic and only shared read access
// to the contents is exposed
unsafe impl Send for SharedAlignedBuffer {}
// SAFETY: The underlying buffer is reference-counted with an atomic and only shared read access
// to the contents is exposed
unsafe impl Sync for SharedAlignedBuffer {}

impl SharedAlignedBuffer {
    #[inline(always)]
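    /// Static reference to an empty buffer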
    pub fn empty_ref() -> &'static Self {
        &EMPTY_SHARED_ALIGNED_BUFFER
    }

    #[inline(always)]
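    /// Creates a new buffer initialized with the provided bytes.
    ///
    /// # Panics
    /// If `bytes.len()` does not fit into `u32`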
    pub fn from_bytes(bytes: &[u8]) -> Self {
        OwnedAlignedBuffer::from_bytes(bytes).into_shared()
    }

    #[inline(always)]
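    /// Converts the buffer back into [`OwnedAlignedBuffer`].
    ///
    /// If this is the only reference to the allocation, the buffer is reused as is; otherwise the
    /// contents are copied into a newly allocated owned buffer.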
    pub fn into_owned(self) -> OwnedAlignedBuffer {
        if self.inner.strong_count_ref().load(Ordering::Acquire) == 1 {
            OwnedAlignedBuffer { inner: self.inner }
        } else {
            OwnedAlignedBuffer::from_bytes(self.as_slice())
        }
    }

    #[inline(always)]
    pub fn as_slice(&self) -> &[u8] {
        self.inner.as_slice()
    }

    #[inline(always)]
    pub fn as_ptr(&self) -> *const u8 {
        self.inner.as_ptr()
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.inner.len_read() == 0
    }

    #[inline(always)]
    pub fn len(&self) -> u32 {
        self.inner.len_read()
    }
}