//! `ab_contracts_io_type/variable_bytes.rs` — container for storing a variable number of bytes.

1use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
2use crate::trivial_type::TrivialType;
3use crate::{DerefWrapper, IoType, IoTypeOptional};
4use core::mem::MaybeUninit;
5use core::ops::{Deref, DerefMut};
6use core::ptr::NonNull;
7use core::{ptr, slice};
8
/// Container for storing variable number of bytes.
///
/// `RECOMMENDED_ALLOCATION` is what is being used when a host needs to allocate memory for call
/// into guest, but guest may receive an allocation with more or less memory in practice depending
/// on other circumstances, like when called from another contract with specific allocation
/// specified.
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    // Pointer to the start of the byte buffer; never null, provided by constructors
    bytes: NonNull<u8>,
    // Pointer to the number of initialized bytes; kept behind a pointer because constructors
    // bind it to caller-provided storage (`&u32`/`&mut u32`) rather than owning the value
    size: NonNull<u32>,
    // Total number of usable bytes in the allocation behind `bytes`; invariant: size <= capacity
    capacity: u32,
}
22
// SAFETY: Pointer accessors hand out exactly the pointers captured by constructors, and
// size/capacity accessors read the corresponding `#[repr(C)]` fields directly
unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        // Encodes metadata for `recommended_allocation` into a fixed-size scratch buffer,
        // returning the buffer together with the number of bytes actually used
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            // Common allocation sizes have dedicated one-byte encodings
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            // NOTE(review): `2028` looks like a possible typo for `2048`, but it matches the
            // `VariableBytes2028` variant name used below — confirm against `IoTypeMetadataKind`
            } else if recommended_allocation == 2028 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // Any other size is encoded as a kind byte followed by the allocation size itself,
            // using the smallest of 1/2/4 little-endian bytes that can represent it
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                // Keep only the `size_bytes` significant little-endian bytes selected above
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // Strange syntax to allow Rust to extend the lifetime of metadata scratch automatically
        // (the scratch buffer is evaluated twice at const time; only the used prefix is kept)
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };

    // TODO: Use `[u8; RECOMMENDED_ALLOCATION as usize]` once stabilized `generic_const_exprs`
    //  allows us to do so
    type PointerType = u8;

    // Forwards to the inherent `size()` method, which reads through the size pointer
    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    // Hands out the same size pointer the constructor captured
    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    // Pointer into `self`'s own `capacity` field; only valid while the borrow of `self` lives
    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    // Writes a new size through the size pointer; debug-checked against capacity
    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: guaranteed to be initialized by constructors
        unsafe {
            self.size.write(size);
        }
    }

    // Reconstructs a shared instance from raw parts; `size` is borrowed, so the caller's
    // storage backs the size pointer for the lifetime of the returned wrapper
    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    // Reconstructs an exclusive instance from raw parts; unlike `from_ptr`, size arrives as a
    // raw `*mut u32` and is null-checked here before being wrapped in `NonNull`
    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: Must be guaranteed by the caller + debug check above
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        // Debug-only sanity check that the stored size fits in the provided capacity
        {
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
        }

        DerefWrapper(Self {
            bytes: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}
183
// Empty marker impl — presumably opts `VariableBytes` into contexts accepting optional I/O
// values (it has a valid zero-size state); see `IoTypeOptional` docs to confirm the contract
impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}
185
186impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
187    /// Create a new shared instance from provided memory buffer.
188    ///
189    /// # Panics
190    /// Panics if `buffer.len() != size`
191    //
192    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
193    // shared reference for most practical purposes.
194    #[inline(always)]
195    #[track_caller]
196    pub const fn from_buffer<'a>(
197        buffer: &'a [<Self as IoType>::PointerType],
198        size: &'a u32,
199    ) -> impl Deref<Target = Self> + 'a {
200        debug_assert!(buffer.len() == *size as usize, "Invalid size");
201        // TODO: Use `debug_assert_eq` when it is available in const environment
202        // debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");
203
204        DerefWrapper(Self {
205            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
206            size: NonNull::from_ref(size),
207            capacity: *size,
208        })
209    }
210
211    /// Create a new exclusive instance from provided memory buffer.
212    ///
213    /// # Panics
214    /// Panics if `buffer.len() != size`
215    //
216    // `impl DerefMut` is used to tie lifetime of returned value to inputs, but still treat it as an
217    // exclusive reference for most practical purposes.
218    #[inline(always)]
219    #[track_caller]
220    pub fn from_buffer_mut<'a>(
221        buffer: &'a mut [<Self as IoType>::PointerType],
222        size: &'a mut u32,
223    ) -> impl DerefMut<Target = Self> + 'a {
224        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");
225
226        DerefWrapper(Self {
227            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
228            size: NonNull::from_mut(size),
229            capacity: *size,
230        })
231    }
232
233    /// Create a new shared instance from provided memory buffer.
234    ///
235    /// # Panics
236    /// Panics if `size > CAPACITY`
237    //
238    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
239    // shared reference for most practical purposes.
240    #[inline(always)]
241    #[track_caller]
242    pub fn from_uninit<'a>(
243        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
244        size: &'a mut u32,
245    ) -> impl DerefMut<Target = Self> + 'a {
246        let capacity = uninit.len();
247        debug_assert!(
248            *size as usize <= capacity,
249            "Size {size} must not exceed capacity {capacity}"
250        );
251        let capacity = capacity as u32;
252
253        DerefWrapper(Self {
254            bytes: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
255            size: NonNull::from_mut(size),
256            capacity,
257        })
258    }
259
260    // Size in bytes
261    #[inline(always)]
262    pub const fn size(&self) -> u32 {
263        // SAFETY: guaranteed to be initialized by constructors
264        unsafe { self.size.read() }
265    }
266
267    /// Capacity in bytes
268    #[inline(always)]
269    pub fn capacity(&self) -> u32 {
270        self.capacity
271    }
272
273    /// Try to get access to initialized bytes
274    #[inline(always)]
275    pub const fn get_initialized(&self) -> &[u8] {
276        let size = self.size();
277        let ptr = self.bytes.as_ptr();
278        // SAFETY: guaranteed by constructor and explicit methods by the user
279        unsafe { slice::from_raw_parts(ptr, size as usize) }
280    }
281
282    /// Try to get exclusive access to initialized `Data`, returns `None` if not initialized
283    #[inline(always)]
284    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
285        let size = self.size();
286        let ptr = self.bytes.as_ptr();
287        // SAFETY: guaranteed by constructor and explicit methods by the user
288        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
289    }
290
291    /// Append some bytes by using more of allocated, but currently unused bytes.
292    ///
293    /// `true` is returned on success, but if there isn't enough unused bytes left, `false` is.
294    #[inline(always)]
295    #[must_use = "Operation may fail"]
296    pub fn append(&mut self, bytes: &[u8]) -> bool {
297        let size = self.size();
298        if bytes.len() + size as usize > self.capacity as usize {
299            return false;
300        }
301
302        // May overflow, which is not allowed
303        let Ok(offset) = isize::try_from(size) else {
304            return false;
305        };
306
307        // SAFETY: allocation range and offset are checked above, the allocation itself is
308        // guaranteed by constructors
309        let mut start = unsafe { self.bytes.offset(offset) };
310        // SAFETY: Alignment is the same, writing happens in properly allocated memory guaranteed by
311        // constructors, number of bytes is checked above, Rust ownership rules will prevent any
312        // overlap here (creating reference to non-initialized part of allocation would already be
313        // undefined behavior anyway)
314        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }
315
316        true
317    }
318
319    /// Truncate internal initialized bytes to this size.
320    ///
321    /// Returns `true` on success or `false` if `new_size` is larger than [`Self::size()`].
322    #[inline(always)]
323    #[must_use = "Operation may fail"]
324    pub fn truncate(&mut self, new_size: u32) -> bool {
325        if new_size > self.size() {
326            return false;
327        }
328
329        // SAFETY: guaranteed to be initialized by constructors
330        unsafe {
331            self.size.write(new_size);
332        }
333
334        true
335    }
336
337    /// Copy contents from another `IoType`.
338    ///
339    /// Returns `false` if actual capacity of the instance is not enough to copy contents of `src`
340    #[inline(always)]
341    #[must_use = "Operation may fail"]
342    pub fn copy_from<T>(&mut self, src: &T) -> bool
343    where
344        T: IoType,
345    {
346        let src_size = src.size();
347        if src_size > self.capacity {
348            return false;
349        }
350
351        // SAFETY: `src` can't be the same as `&mut self` if invariants of constructor arguments
352        // were upheld, size is checked to be within capacity above
353        unsafe {
354            self.bytes
355                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
356            self.size.write(src_size);
357        }
358
359        true
360    }
361
362    /// Get exclusive access to the underlying pointer with no checks.
363    ///
364    /// Can be used for initialization with [`Self::assume_init()`] called afterward to confirm how
365    /// many bytes are in use right now.
366    #[inline(always)]
367    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
368        &mut self.bytes
369    }
370
371    /// Cast a shared reference to this instance into a reference to an instance of a different
372    /// recommended allocation
373    #[inline(always)]
374    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
375        &self,
376    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
377        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
378        // recommended allocation
379        unsafe {
380            NonNull::from_ref(self)
381                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
382                .as_ref()
383        }
384    }
385
386    /// Cast an exclusive reference to this instance into a reference to an instance of a different
387    /// recommended allocation
388    #[inline(always)]
389    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
390        &mut self,
391    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
392        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
393        // recommended allocation
394        unsafe {
395            NonNull::from_mut(self)
396                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
397                .as_mut()
398        }
399    }
400
401    /// Reads and returns value of type `T` or `None` if there is not enough data.
402    ///
403    /// Checks alignment internally to support both aligned and unaligned reads.
404    #[inline(always)]
405    pub fn read_trivial_type<T>(&self) -> Option<T>
406    where
407        T: TrivialType,
408    {
409        if self.size() < T::SIZE {
410            return None;
411        }
412
413        let ptr = self.bytes.cast::<T>();
414
415        let value = unsafe {
416            if ptr.is_aligned() {
417                ptr.read()
418            } else {
419                ptr.read_unaligned()
420            }
421        };
422
423        Some(value)
424    }
425
426    /// Assume that the first `size` are initialized and can be read.
427    ///
428    /// Returns `Some(initialized_bytes)` on success or `None` if `size` is larger than its
429    /// capacity.
430    ///
431    /// # Safety
432    /// Caller must ensure `size` is actually initialized
433    #[inline(always)]
434    #[must_use = "Operation may fail"]
435    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
436        if size > self.capacity {
437            return None;
438        }
439
440        // SAFETY: guaranteed to be initialized by constructors
441        unsafe {
442            self.size.write(size);
443        }
444        Some(self.get_initialized_mut())
445    }
446}