// ab_io_type/variable_bytes.rs

1use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
2use crate::trivial_type::TrivialType;
3use crate::{DerefWrapper, IoType, IoTypeOptional};
4use core::mem::MaybeUninit;
5use core::ops::{Deref, DerefMut};
6use core::ptr::NonNull;
7use core::{ptr, slice};
8
/// Container for storing variable number of bytes.
///
/// `RECOMMENDED_ALLOCATION` is what is being used when a host needs to allocate memory for call
/// into guest, but guest may receive an allocation with more or less memory in practice depending
/// on other circumstances, like when called from another contract with specific allocation
/// specified.
//
// NOTE: `#[repr(C)]` layout is relied upon by `cast_ref`/`cast_mut`, which reinterpret this
// struct across different `RECOMMENDED_ALLOCATION` values — do not reorder fields.
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    // Pointer to the backing byte buffer; not owned — constructors tie its lifetime to the
    // caller-provided buffer via `impl Deref`/`DerefMut` wrappers
    bytes: NonNull<u8>,
    // Pointer to the `u32` slot holding the number of currently initialized bytes; kept as a
    // pointer so writes via `set_size`/`truncate`/`assume_init` are visible through the
    // caller-owned size slot
    size: NonNull<u32>,
    // Total usable bytes behind `bytes`; constructors maintain the invariant `*size <= capacity`
    // (checked with `debug_assert`s)
    capacity: u32,
}
22
23// SAFETY: Low-level (effectively internal) implementation that upholds safety requirements
// SAFETY: Low-level (effectively internal) implementation that upholds safety requirements
unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        // Compile-time builder of the metadata byte string for a given recommended allocation.
        // Returns a fixed-capacity scratch buffer together with the number of meaningful bytes
        // written into it.
        //
        // Frequently used allocation sizes get a dedicated one-byte metadata kind; every other
        // size falls through to a generic kind byte followed by the size itself encoded in the
        // fewest little-endian bytes that can hold it (1, 2 or 4).
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2028 {
                // NOTE(review): `2028` looks like a typo for `2048`, but it mirrors the
                // `VariableBytes2028` variant name in `IoTypeMetadataKind` — the constant and the
                // enum variant must be changed together (or left alone); confirm upstream before
                // touching either
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // Not a dedicated size: pick the smallest generic encoding whose size field can hold
            // the value (`size_bytes` is how many little-endian bytes follow the kind byte)
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                // Little-endian bytes of the allocation size, truncated to `size_bytes` — safe
                // because the branch above guarantees the higher bytes are zero
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // Strange syntax to allow Rust to extend the lifetime of metadata scratch automatically.
        // `metadata(..)` is const-evaluated, so calling it twice costs nothing at runtime; `.1`
        // is the number of meaningful bytes within the fixed-capacity scratch buffer `.0`
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };

    // TODO: Use `[u8; RECOMMENDED_ALLOCATION as usize]` once stabilized `generic_const_exprs`
    //  allows us to do so
    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        // Delegates to the inherent getter, which reads the current size through `self.size`
        self.size()
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        // Debug-only check of the `size <= capacity` invariant; release builds trust the caller
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: guaranteed to be initialized by constructors
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        // Debug-only sanity checks; the returned wrapper borrows `ptr`/`size` for 'a so the
        // instance cannot outlive the caller-provided memory
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        // Same checks as `from_ptr`, but the exclusive borrows allow size/bytes mutation
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_mut(size),
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}
163
// Marker trait impl (no methods to provide); see `IoTypeOptional` for what opting in means
impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}
165
166impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
167    /// Create a new shared instance from provided memory buffer.
168    ///
169    /// # Panics
170    /// Panics if `buffer.len() != size`
171    //
172    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
173    // shared reference for most practical purposes.
174    #[inline(always)]
175    #[track_caller]
176    pub const fn from_buffer<'a>(
177        buffer: &'a [<Self as IoType>::PointerType],
178        size: &'a u32,
179    ) -> impl Deref<Target = Self> + 'a {
180        debug_assert!(buffer.len() == *size as usize, "Invalid size");
181        // TODO: Use `debug_assert_eq` when it is available in const environment
182        // debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");
183
184        DerefWrapper(Self {
185            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
186            size: NonNull::from_ref(size),
187            capacity: *size,
188        })
189    }
190
191    /// Create a new exclusive instance from provided memory buffer.
192    ///
193    /// # Panics
194    /// Panics if `buffer.len() != size`
195    //
196    // `impl DerefMut` is used to tie lifetime of returned value to inputs, but still treat it as an
197    // exclusive reference for most practical purposes.
198    #[inline(always)]
199    #[track_caller]
200    pub fn from_buffer_mut<'a>(
201        buffer: &'a mut [<Self as IoType>::PointerType],
202        size: &'a mut u32,
203    ) -> impl DerefMut<Target = Self> + 'a {
204        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");
205
206        DerefWrapper(Self {
207            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
208            size: NonNull::from_mut(size),
209            capacity: *size,
210        })
211    }
212
213    /// Create a new shared instance from provided memory buffer.
214    ///
215    /// # Panics
216    /// Panics if `size > CAPACITY`
217    //
218    // `impl Deref` is used to tie lifetime of returned value to inputs, but still treat it as a
219    // shared reference for most practical purposes.
220    #[inline(always)]
221    #[track_caller]
222    pub fn from_uninit<'a>(
223        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
224        size: &'a mut u32,
225    ) -> impl DerefMut<Target = Self> + 'a {
226        let capacity = uninit.len();
227        debug_assert!(
228            *size as usize <= capacity,
229            "Size {size} must not exceed capacity {capacity}"
230        );
231        let capacity = capacity as u32;
232
233        DerefWrapper(Self {
234            bytes: NonNull::new(uninit.as_mut_ptr().cast_init()).expect("Not null; qed"),
235            size: NonNull::from_mut(size),
236            capacity,
237        })
238    }
239
240    // Size in bytes
241    #[inline(always)]
242    pub const fn size(&self) -> u32 {
243        // SAFETY: guaranteed to be initialized by constructors
244        unsafe { self.size.read() }
245    }
246
247    /// Capacity in bytes
248    #[inline(always)]
249    pub fn capacity(&self) -> u32 {
250        self.capacity
251    }
252
253    /// Try to get access to initialized bytes
254    #[inline(always)]
255    pub const fn get_initialized(&self) -> &[u8] {
256        let size = self.size();
257        let ptr = self.bytes.as_ptr();
258        // SAFETY: guaranteed by constructor and explicit methods by the user
259        unsafe { slice::from_raw_parts(ptr, size as usize) }
260    }
261
262    /// Try to get exclusive access to initialized `Data`, returns `None` if not initialized
263    #[inline(always)]
264    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
265        let size = self.size();
266        let ptr = self.bytes.as_ptr();
267        // SAFETY: guaranteed by constructor and explicit methods by the user
268        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
269    }
270
271    /// Append some bytes by using more of allocated, but currently unused bytes.
272    ///
273    /// `true` is returned on success, but if there isn't enough unused bytes left, `false` is.
274    #[inline(always)]
275    #[must_use = "Operation may fail"]
276    pub fn append(&mut self, bytes: &[u8]) -> bool {
277        let size = self.size();
278        if bytes.len() + size as usize > self.capacity as usize {
279            return false;
280        }
281
282        // May overflow, which is not allowed
283        let Ok(offset) = isize::try_from(size) else {
284            return false;
285        };
286
287        // SAFETY: allocation range and offset are checked above, the allocation itself is
288        // guaranteed by constructors
289        let mut start = unsafe { self.bytes.offset(offset) };
290        // SAFETY: Alignment is the same, writing happens in properly allocated memory guaranteed by
291        // constructors, number of bytes is checked above, Rust ownership rules will prevent any
292        // overlap here (creating reference to non-initialized part of allocation would already be
293        // undefined behavior anyway)
294        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }
295
296        true
297    }
298
299    /// Truncate internal initialized bytes to this size.
300    ///
301    /// Returns `true` on success or `false` if `new_size` is larger than [`Self::size()`].
302    #[inline(always)]
303    #[must_use = "Operation may fail"]
304    pub fn truncate(&mut self, new_size: u32) -> bool {
305        if new_size > self.size() {
306            return false;
307        }
308
309        // SAFETY: guaranteed to be initialized by constructors
310        unsafe {
311            self.size.write(new_size);
312        }
313
314        true
315    }
316
317    /// Copy contents from another `IoType`.
318    ///
319    /// Returns `false` if actual capacity of the instance is not enough to copy contents of `src`
320    #[inline(always)]
321    #[must_use = "Operation may fail"]
322    pub fn copy_from<T>(&mut self, src: &T) -> bool
323    where
324        T: IoType,
325    {
326        let src_size = src.size();
327        if src_size > self.capacity {
328            return false;
329        }
330
331        // SAFETY: `src` can't be the same as `&mut self` if invariants of constructor arguments
332        // were upheld, size is checked to be within capacity above
333        unsafe {
334            self.bytes
335                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
336            self.size.write(src_size);
337        }
338
339        true
340    }
341
342    /// Get exclusive access to the underlying pointer with no checks.
343    ///
344    /// Can be used for initialization with [`Self::assume_init()`] called afterward to confirm how
345    /// many bytes are in use right now.
346    #[inline(always)]
347    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
348        &mut self.bytes
349    }
350
351    /// Cast a shared reference to this instance into a reference to an instance of a different
352    /// recommended allocation
353    #[inline(always)]
354    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
355        &self,
356    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
357        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
358        // recommended allocation
359        unsafe {
360            NonNull::from_ref(self)
361                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
362                .as_ref()
363        }
364    }
365
366    /// Cast an exclusive reference to this instance into a reference to an instance of a different
367    /// recommended allocation
368    #[inline(always)]
369    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
370        &mut self,
371    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
372        // SAFETY: `VariableBytes` has a fixed layout due to `#[repr(C)]`, which doesn't depend on
373        // recommended allocation
374        unsafe {
375            NonNull::from_mut(self)
376                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
377                .as_mut()
378        }
379    }
380
381    /// Reads and returns value of type `T` or `None` if there is not enough data.
382    ///
383    /// Checks alignment internally to support both aligned and unaligned reads.
384    #[inline(always)]
385    pub fn read_trivial_type<T>(&self) -> Option<T>
386    where
387        T: TrivialType,
388    {
389        if self.size() < T::SIZE {
390            return None;
391        }
392
393        let ptr = self.bytes.cast::<T>();
394
395        // SAFETY: Trivial types are safe to read as bytes, pointer validity is a guaranteed
396        // internal invariant
397        let value = unsafe {
398            if ptr.is_aligned() {
399                ptr.read()
400            } else {
401                ptr.read_unaligned()
402            }
403        };
404
405        Some(value)
406    }
407
408    /// Assume that the first `size` are initialized and can be read.
409    ///
410    /// Returns `Some(initialized_bytes)` on success or `None` if `size` is larger than its
411    /// capacity.
412    ///
413    /// # Safety
414    /// Caller must ensure `size` is actually initialized
415    #[inline(always)]
416    #[must_use = "Operation may fail"]
417    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
418        if size > self.capacity {
419            return None;
420        }
421
422        // SAFETY: guaranteed to be initialized by constructors
423        unsafe {
424            self.size.write(size);
425        }
426        Some(self.get_initialized_mut())
427    }
428}