// ab_io_type/variable_bytes.rs

1use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
2use crate::trivial_type::TrivialType;
3use crate::{DerefWrapper, IoType, IoTypeOptional};
4use core::mem::MaybeUninit;
5use core::ops::{Deref, DerefMut};
6use core::ptr::NonNull;
7use core::{ptr, slice};
8
/// Container for storing variable number of bytes.
///
/// `RECOMMENDED_ALLOCATION` is what is being used when a host needs to allocate memory for call
/// into guest, but guest may receive an allocation with more or less memory in practice depending
/// on other circumstances, like when called from another contract with specific allocation
/// specified.
#[derive(Debug)]
// `#[repr(C)]` fixes the layout regardless of `RECOMMENDED_ALLOCATION`, which is what makes
// `cast_ref()`/`cast_mut()` between different recommended allocations sound
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    /// Pointer to the start of the byte allocation (memory itself is owned by the caller that
    /// constructed this instance)
    bytes: NonNull<u8>,
    /// Pointer to the number of currently initialized bytes; stored behind a pointer to a
    /// caller-provided `u32` (see constructors) rather than inline
    size: NonNull<u32>,
    /// Total number of bytes available behind `bytes`; constructors and `set_size()` maintain
    /// the invariant `*size <= capacity`
    capacity: u32,
}
22
// SAFETY: Low-level (effectively internal) implementation that upholds safety requirements
unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        // Builds metadata for the given recommended allocation, returning a fixed-capacity
        // scratch buffer together with the number of meaningful bytes in it. Well-known
        // allocation sizes are encoded as a single dedicated discriminant byte; any other size
        // is encoded as a generic discriminant followed by the allocation value itself.
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2028 {
                // NOTE(review): `2028` breaks the power-of-two ladder (2048 would fit), but it
                // matches the `VariableBytes2028` variant name — confirm against
                // `IoTypeMetadataKind` before "fixing" either side
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // Not a well-known size: pick the smallest little-endian width (1, 2 or 4 bytes)
            // that can represent the allocation value
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                // Keep only the first `size_bytes` little-endian bytes of the allocation value
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // Strange syntax to allow Rust to extend the lifetime of metadata scratch automatically.
        // `metadata()` is called twice (once for the scratch buffer, once for the used length)
        // because an inner `const` item could not capture the generic parameter
        // `RECOMMENDED_ALLOCATION`; both calls are evaluated at compile time, so this has no
        // runtime cost.
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };

    // TODO: Use `[u8; RECOMMENDED_ALLOCATION as usize]` once stabilized `generic_const_exprs`
    //  allows us to do so
    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        // Delegates to the inherent `size()`, which reads through the `size` pointer
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        // Unlike `size`, capacity is stored inline, so the returned pointer is into `self`
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: guaranteed to be initialized by constructors
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: Must be guaranteed by the caller + debug check above
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            // SAFETY: Must be guaranteed by the caller
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
        }

        DerefWrapper(Self {
            bytes: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}
185
// Marker impl: `VariableBytes` is usable where an optional `IoType` is expected (zero-size
// contents are representable, since constructors accept `*size == 0`)
impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}
187
188impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
189    /// Create a new shared instance from provided memory buffer.
190    ///
191    /// # Panics
192    /// Panics if `buffer.len() != size`
193    //
194    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
195    // shared reference for most practical purposes.
196    #[inline(always)]
197    #[track_caller]
198    pub const fn from_buffer<'a>(
199        buffer: &'a [<Self as IoType>::PointerType],
200        size: &'a u32,
201    ) -> impl Deref<Target = Self> + 'a {
202        debug_assert!(buffer.len() == *size as usize, "Invalid size");
203        // TODO: Use `debug_assert_eq` when it is available in const environment
204        // debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");
205
206        DerefWrapper(Self {
207            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
208            size: NonNull::from_ref(size),
209            capacity: *size,
210        })
211    }
212
213    /// Create a new exclusive instance from provided memory buffer.
214    ///
215    /// # Panics
216    /// Panics if `buffer.len() != size`
217    //
218    // `impl DerefMut` is used to tie lifetime of returned value to inputs, but still treat it as an
219    // exclusive reference for most practical purposes.
220    #[inline(always)]
221    #[track_caller]
222    pub fn from_buffer_mut<'a>(
223        buffer: &'a mut [<Self as IoType>::PointerType],
224        size: &'a mut u32,
225    ) -> impl DerefMut<Target = Self> + 'a {
226        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");
227
228        DerefWrapper(Self {
229            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
230            size: NonNull::from_mut(size),
231            capacity: *size,
232        })
233    }
234
235    /// Create a new shared instance from provided memory buffer.
236    ///
237    /// # Panics
238    /// Panics if `size > CAPACITY`
239    //
240    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
241    // shared reference for most practical purposes.
242    #[inline(always)]
243    #[track_caller]
244    pub fn from_uninit<'a>(
245        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
246        size: &'a mut u32,
247    ) -> impl DerefMut<Target = Self> + 'a {
248        let capacity = uninit.len();
249        debug_assert!(
250            *size as usize <= capacity,
251            "Size {size} must not exceed capacity {capacity}"
252        );
253        let capacity = capacity as u32;
254
255        DerefWrapper(Self {
256            bytes: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
257            size: NonNull::from_mut(size),
258            capacity,
259        })
260    }
261
262    // Size in bytes
263    #[inline(always)]
264    pub const fn size(&self) -> u32 {
265        // SAFETY: guaranteed to be initialized by constructors
266        unsafe { self.size.read() }
267    }
268
269    /// Capacity in bytes
270    #[inline(always)]
271    pub fn capacity(&self) -> u32 {
272        self.capacity
273    }
274
275    /// Try to get access to initialized bytes
276    #[inline(always)]
277    pub const fn get_initialized(&self) -> &[u8] {
278        let size = self.size();
279        let ptr = self.bytes.as_ptr();
280        // SAFETY: guaranteed by constructor and explicit methods by the user
281        unsafe { slice::from_raw_parts(ptr, size as usize) }
282    }
283
284    /// Try to get exclusive access to initialized `Data`, returns `None` if not initialized
285    #[inline(always)]
286    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
287        let size = self.size();
288        let ptr = self.bytes.as_ptr();
289        // SAFETY: guaranteed by constructor and explicit methods by the user
290        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
291    }
292
293    /// Append some bytes by using more of allocated, but currently unused bytes.
294    ///
295    /// `true` is returned on success, but if there isn't enough unused bytes left, `false` is.
296    #[inline(always)]
297    #[must_use = "Operation may fail"]
298    pub fn append(&mut self, bytes: &[u8]) -> bool {
299        let size = self.size();
300        if bytes.len() + size as usize > self.capacity as usize {
301            return false;
302        }
303
304        // May overflow, which is not allowed
305        let Ok(offset) = isize::try_from(size) else {
306            return false;
307        };
308
309        // SAFETY: allocation range and offset are checked above, the allocation itself is
310        // guaranteed by constructors
311        let mut start = unsafe { self.bytes.offset(offset) };
312        // SAFETY: Alignment is the same, writing happens in properly allocated memory guaranteed by
313        // constructors, number of bytes is checked above, Rust ownership rules will prevent any
314        // overlap here (creating reference to non-initialized part of allocation would already be
315        // undefined behavior anyway)
316        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }
317
318        true
319    }
320
321    /// Truncate internal initialized bytes to this size.
322    ///
323    /// Returns `true` on success or `false` if `new_size` is larger than [`Self::size()`].
324    #[inline(always)]
325    #[must_use = "Operation may fail"]
326    pub fn truncate(&mut self, new_size: u32) -> bool {
327        if new_size > self.size() {
328            return false;
329        }
330
331        // SAFETY: guaranteed to be initialized by constructors
332        unsafe {
333            self.size.write(new_size);
334        }
335
336        true
337    }
338
339    /// Copy contents from another `IoType`.
340    ///
341    /// Returns `false` if actual capacity of the instance is not enough to copy contents of `src`
342    #[inline(always)]
343    #[must_use = "Operation may fail"]
344    pub fn copy_from<T>(&mut self, src: &T) -> bool
345    where
346        T: IoType,
347    {
348        let src_size = src.size();
349        if src_size > self.capacity {
350            return false;
351        }
352
353        // SAFETY: `src` can't be the same as `&mut self` if invariants of constructor arguments
354        // were upheld, size is checked to be within capacity above
355        unsafe {
356            self.bytes
357                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
358            self.size.write(src_size);
359        }
360
361        true
362    }
363
364    /// Get exclusive access to the underlying pointer with no checks.
365    ///
366    /// Can be used for initialization with [`Self::assume_init()`] called afterward to confirm how
367    /// many bytes are in use right now.
368    #[inline(always)]
369    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
370        &mut self.bytes
371    }
372
373    /// Cast a shared reference to this instance into a reference to an instance of a different
374    /// recommended allocation
375    #[inline(always)]
376    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
377        &self,
378    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
379        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
380        // recommended allocation
381        unsafe {
382            NonNull::from_ref(self)
383                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
384                .as_ref()
385        }
386    }
387
388    /// Cast an exclusive reference to this instance into a reference to an instance of a different
389    /// recommended allocation
390    #[inline(always)]
391    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
392        &mut self,
393    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
394        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
395        // recommended allocation
396        unsafe {
397            NonNull::from_mut(self)
398                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
399                .as_mut()
400        }
401    }
402
403    /// Reads and returns value of type `T` or `None` if there is not enough data.
404    ///
405    /// Checks alignment internally to support both aligned and unaligned reads.
406    #[inline(always)]
407    pub fn read_trivial_type<T>(&self) -> Option<T>
408    where
409        T: TrivialType,
410    {
411        if self.size() < T::SIZE {
412            return None;
413        }
414
415        let ptr = self.bytes.cast::<T>();
416
417        // SAFETY: Trivial types are safe to read as bytes, pointer validity is a guaranteed
418        // internal invariant
419        let value = unsafe {
420            if ptr.is_aligned() {
421                ptr.read()
422            } else {
423                ptr.read_unaligned()
424            }
425        };
426
427        Some(value)
428    }
429
430    /// Assume that the first `size` are initialized and can be read.
431    ///
432    /// Returns `Some(initialized_bytes)` on success or `None` if `size` is larger than its
433    /// capacity.
434    ///
435    /// # Safety
436    /// Caller must ensure `size` is actually initialized
437    #[inline(always)]
438    #[must_use = "Operation may fail"]
439    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
440        if size > self.capacity {
441            return None;
442        }
443
444        // SAFETY: guaranteed to be initialized by constructors
445        unsafe {
446            self.size.write(size);
447        }
448        Some(self.get_initialized_mut())
449    }
450}