ab_contracts_io_type/
variable_bytes.rs

use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

/// Container for storing a variable number of bytes.
///
/// `RECOMMENDED_ALLOCATION` is the allocation size used when a host needs to allocate memory for a
/// call into a guest, but in practice the guest may receive an allocation with more or less memory
/// depending on other circumstances, such as when called from another contract that specified a
/// different allocation.
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    bytes: NonNull<u8>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2028 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // Strange syntax to allow Rust to extend the lifetime of metadata scratch automatically
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };
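
    // For allocations without a dedicated variant above, the metadata is the kind byte followed
    // by the allocation encoded in as few little-endian bytes as fit. A sketch of the expected
    // encoding for a hypothetical `VariableBytes<100>` (100 < 2^8, so a single length byte):
    //
    //     assert_eq!(
    //         VariableBytes::<100>::METADATA,
    //         &[IoTypeMetadataKind::VariableBytes8b as u8, 100],
    //     );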

    // TODO: Use `[u8; RECOMMENDED_ALLOCATION as usize]` once stabilized `generic_const_exprs`
    //  allows us to do so
    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: guaranteed to be initialized by constructors
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: Must be guaranteed by the caller + debug check above
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
        }

        DerefWrapper(Self {
            bytes: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}

impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}

impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
    /// Create a new shared instance from provided memory buffer.
    ///
    /// # Panics
    /// Panics if `buffer.len() != size`
    //
    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
    // shared reference for most practical purposes.
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(buffer.len() == *size as usize, "Invalid size");
        // TODO: Use `debug_assert_eq` when it is available in const environment
        // debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }
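
    // A minimal usage sketch: wrapping an existing, fully initialized buffer. The buffer length
    // and `size` must agree, and capacity is set to `size`:
    //
    //     let buffer = [1u8, 2, 3, 4];
    //     let size = buffer.len() as u32;
    //     let bytes = VariableBytes::<0>::from_buffer(&buffer, &size);
    //     assert_eq!(bytes.get_initialized(), &[1, 2, 3, 4]);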

    /// Create a new exclusive instance from provided memory buffer.
    ///
    /// # Panics
    /// Panics if `buffer.len() != size`
    //
    // `impl DerefMut` is used to tie lifetime of returned value to inputs, but still treat it as an
    // exclusive reference for most practical purposes.
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

    /// Create a new exclusive instance from provided uninitialized memory buffer.
    ///
    /// # Panics
    /// Panics if `size > CAPACITY`
    //
    // `impl DerefMut` is used to tie lifetime of returned value to inputs, but still treat it as an
    // exclusive reference for most practical purposes.
    // TODO: Change `usize` to `u32` once stabilized `generic_const_exprs` feature allows us to do
    //  `CAPACITY as usize`
    #[track_caller]
    pub fn from_uninit<'a, const CAPACITY: usize>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>; CAPACITY],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(
            *size as usize <= CAPACITY,
            "Size {size} must not exceed capacity {CAPACITY}"
        );
        let capacity = CAPACITY as u32;

        DerefWrapper(Self {
            bytes: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }
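
    // A sketch of constructing over uninitialized memory with spare capacity (here `size` starts
    // at 0, so nothing is considered initialized yet):
    //
    //     let mut uninit = [MaybeUninit::<u8>::uninit(); 16];
    //     let mut size = 0u32;
    //     let bytes = VariableBytes::<16>::from_uninit(&mut uninit, &mut size);
    //     assert_eq!(bytes.size(), 0);
    //     assert_eq!(bytes.capacity(), 16);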

    /// Size in bytes
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: guaranteed to be initialized by constructors
        unsafe { self.size.read() }
    }

    /// Capacity in bytes
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

    /// Get access to the initialized bytes
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: guaranteed by constructor and explicit methods by the user
        unsafe { slice::from_raw_parts(ptr, size as usize) }
    }

    /// Get exclusive access to the initialized bytes
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: guaranteed by constructor and explicit methods by the user
        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
    }

    /// Append some bytes by using more of the allocated, but currently unused, bytes.
    ///
    /// Returns `true` on success or `false` if there aren't enough unused bytes left.
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let size = self.size();
        if bytes.len() + size as usize > self.capacity as usize {
            return false;
        }

        // The offset must fit into `isize`; overflow is not allowed
        let Ok(offset) = isize::try_from(size) else {
            return false;
        };

        // SAFETY: allocation range and offset are checked above, the allocation itself is
        // guaranteed by constructors
        let mut start = unsafe { self.bytes.offset(offset) };
        // SAFETY: Alignment is the same, writing happens in properly allocated memory guaranteed by
        // constructors, number of bytes is checked above, Rust ownership rules will prevent any
        // overlap here (creating reference to non-initialized part of allocation would already be
        // undefined behavior anyway)
        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }

        // Extend the initialized size to cover the appended bytes
        // SAFETY: guaranteed to be initialized by constructors, the new size is checked against
        // capacity above
        unsafe {
            self.size.write(size + bytes.len() as u32);
        }

        true
    }
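
    // A sketch of appending into spare capacity (continuing the `from_uninit` construction
    // pattern shown above):
    //
    //     let mut uninit = [MaybeUninit::<u8>::uninit(); 8];
    //     let mut size = 0u32;
    //     let mut bytes = VariableBytes::<8>::from_uninit(&mut uninit, &mut size);
    //     assert!(bytes.append(b"hello"));
    //     assert_eq!(bytes.get_initialized(), b"hello");
    //     assert!(!bytes.append(b"world!")); // 5 + 6 > 8, not enough unused bytes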

    /// Truncate internal initialized bytes to this size.
    ///
    /// Returns `true` on success or `false` if `new_size` is larger than [`Self::size()`].
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() {
            return false;
        }

        // SAFETY: guaranteed to be initialized by constructors
        unsafe {
            self.size.write(new_size);
        }

        true
    }
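
    // Continuing the sketch above, truncation only ever shrinks the initialized size:
    //
    //     assert!(bytes.truncate(2));
    //     assert_eq!(bytes.get_initialized(), b"he");
    //     assert!(!bytes.truncate(10)); // larger than the current size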

    /// Copy contents from another `IoType`.
    ///
    /// Returns `false` if the actual capacity of the instance is not enough to copy the contents
    /// of `src`.
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from<T>(&mut self, src: &T) -> bool
    where
        T: IoType,
    {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // SAFETY: `src` can't be the same as `&mut self` if invariants of constructor arguments
        // were upheld, size is checked to be within capacity above
        unsafe {
            self.bytes
                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
            self.size.write(src_size);
        }

        true
    }
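
    // A sketch of copying from one `VariableBytes` into another with sufficient capacity:
    //
    //     let src_buffer = [1u8, 2, 3];
    //     let src_size = src_buffer.len() as u32;
    //     let src = VariableBytes::<0>::from_buffer(&src_buffer, &src_size);
    //
    //     let mut dst_uninit = [MaybeUninit::<u8>::uninit(); 8];
    //     let mut dst_size = 0u32;
    //     let mut dst = VariableBytes::<8>::from_uninit(&mut dst_uninit, &mut dst_size);
    //     assert!(dst.copy_from(&*src));
    //     assert_eq!(dst.get_initialized(), &[1, 2, 3]);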

    /// Get exclusive access to the underlying pointer with no checks.
    ///
    /// Can be used for initialization with [`Self::assume_init()`] called afterward to confirm how
    /// many bytes are in use right now.
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
        &mut self.bytes
    }

    /// Cast a shared reference to this instance into a reference to an instance of a different
    /// recommended allocation
    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
        // recommended allocation
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

    /// Cast an exclusive reference to this instance into a reference to an instance of a different
    /// recommended allocation
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
        // recommended allocation
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }
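
    // A sketch of casting between recommended allocations; only the type-level hint changes,
    // while the underlying pointers and capacity stay the same:
    //
    //     let buffer = [0u8; 4];
    //     let size = buffer.len() as u32;
    //     let bytes = VariableBytes::<1024>::from_buffer(&buffer, &size);
    //     let as_default: &VariableBytes<0> = bytes.cast_ref::<0>();
    //     assert_eq!(as_default.size(), 4);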

    /// Reads and returns a value of type `T` or `None` if there is not enough data.
    ///
    /// Checks alignment internally to support both aligned and unaligned reads.
    #[inline(always)]
    pub fn read_trivial_type<T>(&self) -> Option<T>
    where
        T: TrivialType,
    {
        if self.size() < T::SIZE {
            return None;
        }

        let ptr = self.bytes.cast::<T>();

        // SAFETY: at least `T::SIZE` bytes are checked to be initialized above, alignment is
        // handled by dispatching between aligned and unaligned reads
        let value = unsafe {
            if ptr.is_aligned() {
                ptr.read()
            } else {
                ptr.read_unaligned()
            }
        };

        Some(value)
    }
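
    // A sketch of reading a fixed-size value from the front, assuming `u32` implements
    // `TrivialType` like other fixed-size primitives in this crate, on a little-endian target:
    //
    //     let buffer = 0x0403_0201_u32.to_le_bytes();
    //     let size = buffer.len() as u32;
    //     let bytes = VariableBytes::<0>::from_buffer(&buffer, &size);
    //     assert_eq!(bytes.read_trivial_type::<u32>(), Some(0x0403_0201));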

    /// Assume that the first `size` bytes are initialized and can be read.
    ///
    /// Returns `Some(initialized_bytes)` on success or `None` if `size` is larger than the
    /// capacity.
    ///
    /// # Safety
    /// Caller must ensure that the first `size` bytes are actually initialized
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
        if size > self.capacity {
            return None;
        }

        // SAFETY: guaranteed to be initialized by constructors
        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
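
    // A sketch of raw initialization through `as_mut_ptr` followed by `assume_init`:
    //
    //     let mut uninit = [MaybeUninit::<u8>::uninit(); 4];
    //     let mut size = 0u32;
    //     let mut bytes = VariableBytes::<4>::from_uninit(&mut uninit, &mut size);
    //     unsafe {
    //         bytes.as_mut_ptr().write_bytes(0xff, 2);
    //         let initialized = bytes.assume_init(2).expect("Within capacity; qed");
    //         assert_eq!(initialized, &[0xff, 0xff]);
    //     }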
}