// ab_core_primitives/block/body.rs
1//! Block body primitives
2
3#[cfg(feature = "alloc")]
4pub mod owned;
5
6#[cfg(feature = "alloc")]
7use crate::block::body::owned::{
8    GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
9    OwnedLeafShardBody,
10};
11use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
12use crate::block::{BlockNumber, align_to_and_ensure_zero_padding};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::{LocalSegmentIndex, SegmentRoot};
16use crate::shard::{RealShardKind, ShardIndex};
17use crate::transaction::Transaction;
18use ab_blake3::{BLOCK_LEN, OUT_LEN, single_block_hash};
19use ab_io_type::trivial_type::TrivialType;
20use ab_io_type::unaligned::Unaligned;
21use ab_merkle_tree::balanced::BalancedMerkleTree;
22use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
23#[cfg(feature = "alloc")]
24use core::iter;
25use core::iter::TrustedLen;
26use core::{array, cmp, fmt, slice};
27use derive_more::From;
28use yoke::Yokeable;
29
/// Generic block body
///
/// Abstracts over the borrowed body types of the different shard kinds so generic code can
/// convert to an owned representation and compute the body root uniformly.
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind of the shard this block body belongs to
    const SHARD_KIND: RealShardKind;

    /// Owned block body.
    ///
    /// The bound guarantees the owned version borrows back into `Self` via
    /// `GenericOwnedBlockBody::Body<'a>`.
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Turn into an owned version
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute block body root
    fn root(&self) -> Blake3Hash;
}
51
52/// Calculates a Merkle Tree root for a provided list of segment roots
53#[inline]
54pub fn compute_segments_root<'a, Iter>(segment_roots: Iter) -> Blake3Hash
55where
56    Iter: IntoIterator<Item = &'a SegmentRoot>,
57{
58    // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866 that allows
59    //  the code to compile. Constant 4294967295 is hardcoded here and below for compilation to
60    //  succeed.
61    #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
62    #[expect(clippy::eq_op, reason = "Intentional documentation")]
63    const {
64        assert!(u32::MAX == 4294967295);
65    }
66    // TODO: Keyed hash
67    let root = UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
68        segment_roots.into_iter().map(|segment_root| {
69            // Hash the root again so we can prove it, otherwise root of segments is
70            // indistinguishable from individual segment roots and can be used to confuse verifier
71            single_block_hash(segment_root.as_ref())
72                .expect("Less than a single block worth of bytes; qed")
73        }),
74    );
75
76    Blake3Hash::new(root.unwrap_or_default())
77}
78
/// Own segments produced by a shard
#[derive(Debug, Copy, Clone)]
pub struct OwnSegments<'a> {
    /// Local segment index of the first own segment root
    pub first_local_segment_index: LocalSegmentIndex,
    /// Segment roots produced by a shard; the first entry corresponds to
    /// [`Self::first_local_segment_index`]
    pub segment_roots: &'a [SegmentRoot],
}
87
impl OwnSegments<'_> {
    /// Compute the root of own segments.
    ///
    /// The root is a two-leaf balanced Merkle tree committing to both the hashed first local
    /// segment index and the Merkle root of the segment roots (see [`compute_segments_root()`]).
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // TODO: Keyed hash
        let root = BalancedMerkleTree::compute_root_only(&[
            // First leaf: hash of the first local segment index bytes
            single_block_hash(self.first_local_segment_index.as_bytes())
                .expect("Less than a single block worth of bytes; qed"),
            // Second leaf: root of the segment roots
            *compute_segments_root(self.segment_roots),
        ]);

        Blake3Hash::new(root)
    }

    /// Compute the root of own segments while mixing in shard index.
    ///
    /// This method is useful for deriving roots on intermediate shard that can be verified by the
    /// beacon chain later (which will have just shard index available rather than the whole leaf
    /// shard block header).
    #[inline]
    pub fn root_with_shard_index(&self, shard_index: ShardIndex) -> Blake3Hash {
        // TODO: Keyed hash
        let root = BalancedMerkleTree::compute_root_only(&[
            {
                // First leaf: hash of shard index concatenated with the first local segment
                // index, which must fit into a single BLAKE3 block for `single_block_hash()`
                const {
                    assert!((ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize <= BLOCK_LEN);
                }
                let mut pair = [0u8; (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize];
                pair[..ShardIndex::SIZE as usize].copy_from_slice(shard_index.as_bytes());
                pair[ShardIndex::SIZE as usize..]
                    .copy_from_slice(self.first_local_segment_index.as_bytes());

                single_block_hash(&pair).expect("Less than a single block worth of bytes; qed")
            },
            // Second leaf: root of the segment roots, same as in `Self::root()`
            *compute_segments_root(self.segment_roots),
        ]);

        Blake3Hash::new(root)
    }
}
128
/// Information about intermediate shard block
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// A block header that corresponds to an intermediate shard
    pub header: IntermediateShardHeader<'a>,
    /// Segments proof if either own or child segments are present
    pub segments_proof: Option<&'a [u8; OUT_LEN]>,
    /// Segments in the corresponding block
    pub own_segments: Option<OwnSegments<'a>>,
    // Number of leaf shard blocks that contributed segments; used to parse
    // `leaf_shards_segments_bytes` in `Self::leaf_shards_segments()`
    num_leaf_shard_blocks_with_segments: u8,
    // Raw per-leaf-shard segment bytes; the layout is validated by
    // `IntermediateShardBlocksInfo` constructors before this struct is created
    leaf_shards_segments_bytes: &'a [u8],
}
141
impl<'a> IntermediateShardBlockInfo<'a> {
    /// Segments of leaf shards in the corresponding intermediate shard block.
    ///
    /// Yields a `(leaf shard index, own segments of that leaf shard)` pair for each leaf shard
    /// block that contributed segments.
    #[inline]
    pub fn leaf_shards_segments(
        &self,
    ) -> impl ExactSizeIterator<Item = (ShardIndex, OwnSegments<'a>)> + TrustedLen + 'a {
        // The bytes start with one `u8` segment root count per leaf shard block, followed by
        // per-leaf-shard data in the same order
        // SAFETY: Checked in constructor
        let (counts, mut remainder) = unsafe {
            self.leaf_shards_segments_bytes.split_at_unchecked(
                usize::from(self.num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
            )
        };
        counts.iter().map(move |&num_own_segment_roots| {
            let num_own_segment_roots = usize::from(num_own_segment_roots);

            // Shard index stored as unaligned bytes
            let shard_index;
            // SAFETY: Checked in constructor
            (shard_index, remainder) =
                unsafe { remainder.split_at_unchecked(ShardIndex::SIZE as usize) };
            // SAFETY: Correct size and no alignment requirements
            let shard_index =
                unsafe { Unaligned::<ShardIndex>::from_bytes_unchecked(shard_index) }.as_inner();

            // Local segment index of the first own segment root, stored as unaligned bytes
            let first_local_segment_index;
            // SAFETY: Checked in constructor
            (first_local_segment_index, remainder) =
                unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
            // SAFETY: Correct size and no alignment requirements
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            // Concatenated own segment roots of this leaf shard block
            let own_segment_roots;
            // SAFETY: Checked in constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            (
                shard_index,
                OwnSegments {
                    first_local_segment_index,
                    segment_roots: own_segment_roots,
                },
            )
        })
    }
}
198
/// Information about a collection of intermediate shard blocks
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of intermediate shard blocks in the collection
    num_blocks: usize,
    // Raw bytes covering per-block counts, padding and per-block data (everything after the
    // leading `u16` block count); the layout is validated in the constructors
    bytes: &'a [u8],
}
205
206impl<'a> IntermediateShardBlocksInfo<'a> {
207    /// Create an instance from provided bytes.
208    ///
209    /// `bytes` do not need to be aligned.
210    ///
211    /// Returns an instance and remaining bytes on success.
212    #[inline]
213    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
214        // The layout here is as follows:
215        // * number of blocks: u16 as unaligned little-endian bytes
216        // * for each block:
217        //   * number of own segment roots: u8
218        //   * number of leaf shard blocks with segments: u8
219        // * padding to 8-bytes boundary with zeroes
220        // * for each block:
221        //   * block header: IntermediateShardHeader
222        //   * segment roots proof (if either own or child segments are present)
223        //   * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
224        //   * concatenated own segment roots
225        //   * for each leaf shard block with segments:
226        //     * number of own segment roots: u8
227        //   * for each leaf shard block with segments:
228        //     * shard index: u32 as unaligned little-endian bytes
229        //     * local segment index of the first segment root: unaligned `LocalSegmentIndex`
230        //     * concatenated own segment roots
231        //   * padding to 8-bytes boundary with zeroes
232
233        let num_blocks = bytes.split_off(..size_of::<u16>())?;
234        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));
235
236        let bytes_start = bytes;
237
238        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;
239
240        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
241
242        for _ in 0..num_blocks {
243            let num_own_segment_roots = usize::from(counts[0]);
244            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
245            counts = &counts[2..];
246
247            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
248
249            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
250                let _segments_proof = remainder.split_off(..SegmentRoot::SIZE)?;
251            }
252
253            if num_own_segment_roots > 0 {
254                let _first_local_segment_index =
255                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
256                let _own_segment_roots =
257                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
258            }
259
260            let leaf_shard_segment_roots_counts =
261                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;
262
263            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
264                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
265                let _first_local_segment_index =
266                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
267                let _own_segment_roots = remainder
268                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
269            }
270
271            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
272        }
273
274        let info = Self {
275            num_blocks,
276            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
277        };
278
279        if !info.is_internally_consistent() {
280            return None;
281        }
282
283        Some((info, remainder))
284    }
285
    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of blocks: u16 as unaligned little-endian bytes
        // * for each block:
        //   * number of own segment roots: u8
        //   * number of leaf shard blocks with segments: u8
        // * padding to 8-bytes boundary with zeroes
        // * for each block:
        //   * block header: IntermediateShardHeader
        //   * segment roots proof (if either own or child segments are present)
        //   * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        //   * concatenated own segment roots
        //   * for each leaf shard block with segments:
        //     * number of own segment roots: u8
        //   * for each leaf shard block with segments:
        //     * shard index: u32 as unaligned little-endian bytes
        //     * local segment index of the first segment root: unaligned `LocalSegmentIndex`
        //     * concatenated own segment roots
        //   * padding to 8-bytes boundary with zeroes

        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        // Remember where the stored bytes begin so `Self::bytes` can be computed at the end
        let bytes_start = bytes;

        // Two `u8` counts per block
        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Structural validation only; `Self::is_internally_consistent()` is intentionally skipped
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
            counts = &counts[2..];

            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;

            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
                let _segments_proof = remainder.split_off(..OUT_LEN)?;
            }

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }

            let leaf_shard_segment_roots_counts =
                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;

            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots = remainder
                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
            }

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }
358
    /// Check intermediate shard info's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    ///
    /// Verifies that shard indices, block numbers and first local segment indices are strictly
    /// increasing, that leaf shards belong to their intermediate shard, and that each block's
    /// segments proof verifies against its header's body root.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        let mut last_intermediate_shard_info =
            None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|intermediate_shard_block| {
            let shard_index = intermediate_shard_block.header.prefix.shard_index;

            // Ensure increasing order of shard indices, block numbers and local segment indices
            if let Some((
                last_intermediate_shard_index,
                last_intermediate_shard_block_number,
                last_intermediate_shard_first_local_segment_index,
            )) = last_intermediate_shard_info
            {
                match last_intermediate_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        // New shard index: reset tracked block number and segment index
                        last_intermediate_shard_info.replace((
                            shard_index,
                            intermediate_shard_block.header.prefix.number,
                            intermediate_shard_block
                                .own_segments
                                .as_ref()
                                .map(|own_segments| own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Same shard: block numbers must strictly increase
                        if last_intermediate_shard_block_number
                            >= intermediate_shard_block.header.prefix.number
                        {
                            return false;
                        }
                        if let Some(own_segments) = &intermediate_shard_block.own_segments {
                            // First local segment indices must strictly increase too (when
                            // present on both blocks)
                            if let Some(last_intermediate_shard_first_local_segment_index) =
                                last_intermediate_shard_first_local_segment_index
                                && last_intermediate_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                Some(own_segments.first_local_segment_index),
                            ));
                        } else {
                            // Keep the last seen segment index for subsequent comparisons
                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                last_intermediate_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        // Shard indices must never decrease
                        return false;
                    }
                }
            } else {
                // First block in the collection, nothing to compare against yet
                last_intermediate_shard_info.replace((
                    shard_index,
                    intermediate_shard_block.header.prefix.number,
                    intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(|own_segments| own_segments.first_local_segment_index),
                ));
            }

            let mut last_leaf_shard_info = None::<(ShardIndex, LocalSegmentIndex)>;

            // Ensure increasing order of shard indices and local segment indices
            if !intermediate_shard_block.leaf_shards_segments().all(
                |(leaf_shard_index, own_segments)| {
                    // Leaf shard must actually belong to this intermediate shard
                    if !leaf_shard_index.is_child_of(shard_index) {
                        return false;
                    }

                    if let Some((
                        last_leaf_shard_index,
                        last_leaf_shard_first_local_segment_index,
                    )) = last_leaf_shard_info
                    {
                        match last_leaf_shard_index.cmp(&leaf_shard_index) {
                            cmp::Ordering::Less => {
                                // Expected
                            }
                            cmp::Ordering::Equal => {
                                // Same leaf shard: segment indices must strictly increase
                                if last_leaf_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                                {
                                    return false;
                                }
                            }
                            cmp::Ordering::Greater => {
                                return false;
                            }
                        }
                    }
                    last_leaf_shard_info
                        .replace((leaf_shard_index, own_segments.first_local_segment_index));

                    true
                },
            ) {
                return false;
            }

            // A block without any segments must not carry a segments proof
            if intermediate_shard_block.own_segments.is_none()
                && intermediate_shard_block
                    .leaf_shards_segments()
                    .size_hint()
                    .0
                    == 0
            {
                return intermediate_shard_block.segments_proof.is_none();
            }

            let segments_proof = intermediate_shard_block.segments_proof.unwrap_or(&[0; _]);

            // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866 that
            //  allows the code to compile. Constant 65535 is hardcoded here and below for
            //  compilation to succeed.
            #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
            #[expect(clippy::eq_op, reason = "Intentional documentation")]
            const {
                assert!(u16::MAX == 65535);
            }
            let leaf_shards_root = UnbalancedMerkleTree::compute_root_only::<65535, _, _>(
                intermediate_shard_block.leaf_shards_segments().map(
                    |(shard_index, own_segments)| own_segments.root_with_shard_index(shard_index),
                ),
            )
            .unwrap_or_default();

            // The segments proof must link the pair (own segments root, leaf shards root) to the
            // body root claimed in the block header
            BalancedMerkleTree::<2>::verify(
                &intermediate_shard_block.header.result.body_root,
                array::from_ref(segments_proof),
                0,
                BalancedMerkleTree::compute_root_only(&[
                    *intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .unwrap_or_default(),
                    leaf_shards_root,
                ]),
            )
        })
    }
513
514    /// Iterator over intermediate shard blocks in a collection
515    #[inline]
516    pub fn iter(
517        &self,
518    ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + 'a {
519        // SAFETY: Checked in constructor
520        let (mut counts, mut remainder) = unsafe {
521            self.bytes
522                .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
523        };
524
525        (0..self.num_blocks).map(move |_| {
526            let num_own_segment_roots = usize::from(counts[0]);
527            let num_leaf_shard_blocks_with_segments = counts[1];
528            counts = &counts[2..];
529
530            // TODO: Unchecked method would have been helpful here
531            let header;
532            (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
533                .expect("Already checked in constructor; qed");
534
535            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
536                .expect("Already checked in constructor; qed");
537
538            let segments_proof =
539                if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
540                    let segments_proof;
541                    // SAFETY: Checked in constructor
542                    (segments_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
543                    // SAFETY: Valid pointer and size, no alignment requirements
544                    Some(unsafe {
545                        segments_proof
546                            .as_ptr()
547                            .cast::<[u8; OUT_LEN]>()
548                            .as_ref_unchecked()
549                    })
550                } else {
551                    None
552                };
553
554            let own_segments = if num_own_segment_roots > 0 {
555                let first_local_segment_index;
556                // SAFETY: Checked in constructor
557                (first_local_segment_index, remainder) =
558                    unsafe { remainder.split_at_unchecked(size_of::<LocalSegmentIndex>()) };
559                // SAFETY: Correct alignment and size
560                let first_local_segment_index =
561                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };
562
563                let own_segment_roots;
564                // SAFETY: Checked in constructor
565                (own_segment_roots, remainder) = unsafe {
566                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
567                };
568                // SAFETY: Valid pointer and size, no alignment requirements
569                let own_segment_roots = unsafe {
570                    slice::from_raw_parts(
571                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
572                        num_own_segment_roots,
573                    )
574                };
575                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
576
577                Some(OwnSegments {
578                    first_local_segment_index,
579                    segment_roots: own_segment_roots,
580                })
581            } else {
582                None
583            };
584
585            // SAFETY: Checked in constructor
586            let leaf_shard_segment_roots_counts = unsafe {
587                remainder.get_unchecked(
588                    ..usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
589                )
590            };
591
592            let leaf_shards_segments_bytes;
593            // SAFETY: Checked in constructor
594            (leaf_shards_segments_bytes, remainder) = unsafe {
595                remainder.split_at_unchecked(
596                    usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>()
597                        + usize::from(num_leaf_shard_blocks_with_segments)
598                            * (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize
599                        + leaf_shard_segment_roots_counts.iter().fold(
600                            0_usize,
601                            |acc, &num_own_segment_roots| {
602                                acc + usize::from(num_own_segment_roots) * SegmentRoot::SIZE
603                            },
604                        ),
605                )
606            };
607
608            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
609                .expect("Already checked in constructor; qed");
610
611            IntermediateShardBlockInfo {
612                header,
613                segments_proof,
614                own_segments,
615                num_leaf_shard_blocks_with_segments,
616                leaf_shards_segments_bytes,
617            }
618        })
619    }
620
    /// Number of intermediate shard blocks in this collection
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }
626
627    /// Returns `true` if there are no intermediate shard blocks
628    #[inline(always)]
629    pub const fn is_empty(&self) -> bool {
630        self.num_blocks == 0
631    }
632
    /// Compute the root of segments of the intermediate shard blocks info.
    ///
    /// Returns the default value for an empty collection of segment roots.
    ///
    /// The result is a two-level tree: an inner tree per block over the block's own segments root
    /// and its leaf shards' roots, and an outer tree over the per-block roots.
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866
                //  that allows the code to compile. Constant 4294967295 is hardcoded here and below
                //  for compilation to succeed.
                #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
                #[expect(clippy::eq_op, reason = "Intentional documentation")]
                const {
                    assert!(u32::MAX == 4294967295);
                }
                // Inner tree: own segments root (if any) followed by per-leaf-shard roots mixed
                // with their shard indices
                UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
                    shard_block_info
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .into_iter()
                        .chain(shard_block_info.leaf_shards_segments().map(
                            |(shard_index, own_segments)| {
                                own_segments.root_with_shard_index(shard_index)
                            },
                        )),
                )
                // Block without any segments contributes the default hash
                .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
667
668    /// Compute the root of headers of the intermediate shard blocks info.
669    ///
670    /// Returns the default value for an empty collection of shard blocks.
671    #[inline]
672    pub fn headers_root(&self) -> Blake3Hash {
673        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
674            // TODO: Keyed hash
675            self.iter().map(|shard_block_info| {
676                // Hash the root again so we can prove it, otherwise the root of headers is
677                // indistinguishable from individual block roots and can be used to confuse
678                // verifier
679                single_block_hash(shard_block_info.header.root().as_ref())
680                    .expect("Less than a single block worth of bytes; qed")
681            }),
682        )
683        .unwrap_or_default();
684
685        Blake3Hash::new(root)
686    }
687}
688
/// Block body that corresponds to the beacon chain
#[derive(Debug, Copy, Clone, Yokeable)]
// Prevent creation of potentially broken invariants externally
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    /// Segments produced by this shard
    own_segments: Option<OwnSegments<'a>>,
    /// Intermediate shard blocks
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    /// Proof of time checkpoints from after future proof of time of the parent block to the
    /// current block's future proof of time (inclusive)
    pot_checkpoints: &'a [PotCheckpoints],
}
702
impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::BeaconChain;

    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Resolves to the inherent `BeaconChainBody` method (inherent methods take precedence
        // over trait methods in method resolution); otherwise this call would recurse
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Resolves to the inherent `BeaconChainBody::root()`, same as above
        self.root()
    }
}
720
impl<'a> BeaconChainBody<'a> {
    /// Create an instance from provided correctly aligned bytes.
    ///
    /// `bytes` should be 4-bytes aligned.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of PoT checkpoints: u32 as aligned little-endian bytes
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * intermediate shard blocks: IntermediateShardBlocksInfo
        // * concatenated PoT checkpoints

        // `from_bytes` returns `None` on failure, which `?` propagates
        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: All bit patterns are valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        // The first local segment index and segment roots are only serialized when there is at
        // least one own segment root
        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: Valid pointer and size, no alignment requirements
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        let body = Self {
            own_segments,
            intermediate_shard_blocks,
            pot_checkpoints,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // Nothing to check here
        true
    }

    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of PoT checkpoints: u32 as aligned little-endian bytes
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * intermediate shard blocks: IntermediateShardBlocksInfo
        // * concatenated PoT checkpoints

        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: All bit patterns are valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        // Unlike `Self::try_from_bytes()`, the unchecked variant of the nested parser is used here
        // and the final `is_internally_consistent()` call below is skipped
        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: Valid pointer and size, no alignment requirements
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        Some((
            Self {
                own_segments,
                intermediate_shard_blocks,
                pot_checkpoints,
            },
            remainder,
        ))
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBeaconChainBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedBeaconChainBody::new(
                // Reconstruct `(local segment index, segment root)` pairs: indices are
                // consecutive starting from `first_local_segment_index`
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedBeaconChainBody::new(
                iter::empty(),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Segment roots produced by this shard
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Intermediate shard blocks
    #[inline(always)]
    pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
        &self.intermediate_shard_blocks
    }

    /// Proof of time checkpoints from after future proof of time of the parent block to the current
    /// block's future proof of time (inclusive)
    #[inline(always)]
    pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
        self.pot_checkpoints
    }

    /// Compute block body root
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // TODO: Keyed hash
        // Leaves (in order): own segments root (or default when absent), intermediate shard
        // segments root, intermediate shard headers root, hash of concatenated PoT checkpoints
        let root = BalancedMerkleTree::compute_root_only(&[
            *self
                .own_segments
                .as_ref()
                .map(OwnSegments::root)
                .unwrap_or_default(),
            *self.intermediate_shard_blocks.segments_root(),
            *self.intermediate_shard_blocks.headers_root(),
            blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
                .into(),
        ]);

        Blake3Hash::new(root)
    }
}
948
/// Information about leaf shard segments
#[derive(Debug, Copy, Clone)]
pub struct LeafShardOwnSegments<'a> {
    /// Segment roots proof.
    ///
    /// Sibling hash that proves `own_segments`' root at leaf index 0 of the 2-leaf tree under the
    /// leaf shard header's body root (see `LeafShardBlocksInfo::is_internally_consistent()`).
    pub segment_roots_proof: &'a [u8; OUT_LEN],
    /// Segments produced by this shard
    pub own_segments: OwnSegments<'a>,
}
957
/// Information about leaf shard block container inside intermediate shard block body
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// A block header that corresponds to a leaf shard
    pub header: LeafShardHeader<'a>,
    /// Segments in the corresponding block (`None` when the block produced no segment roots)
    pub segments: Option<LeafShardOwnSegments<'a>>,
}
966
/// Information about a collection of leaf shard blocks
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of leaf shard blocks serialized in `bytes`
    num_blocks: usize,
    // The serialized region (per-block segment root counts, then headers with padding and
    // segments), exactly as validated and sliced by `Self::try_from_bytes()`
    bytes: &'a [u8],
}
973
impl<'a> LeafShardBlocksInfo<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of blocks: u16 as unaligned little-endian bytes
        // * for each block:
        //   * number of own segment roots: u8
        // * padding to 8-bytes boundary with zeroes
        // * for each block:
        //   * block header: LeafShardHeader
        //   * padding to 8-bytes boundary with zeroes
        //   * local segment index of the first segment root (if any): `LocalSegmentIndex`
        //   * segment roots proof (if there is at least one segment root)
        //   * concatenated own segment roots

        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        // Remember where the retained region starts; its length is computed after the
        // validation pass below
        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Validation pass: walk every block once so that `Self::iter()` can later re-parse the
        // same region without bounds checks
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }
        }

        let info = Self {
            num_blocks,
            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
        };

        if !info.is_internally_consistent() {
            return None;
        }

        Some((info, remainder))
    }

    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of blocks: u16 as unaligned little-endian bytes
        // * for each block:
        //   * number of own segment roots: u8
        // * padding to 8-bytes boundary with zeroes
        // * for each block:
        //   * block header: LeafShardHeader
        //   * padding to 8-bytes boundary with zeroes
        //   * local segment index of the first segment root (if any): `LocalSegmentIndex`
        //   * segment roots proof (if there is at least one segment root)
        //   * concatenated own segment roots

        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Same size/padding validation as `Self::try_from_bytes()`; only the
        // `is_internally_consistent()` call is skipped
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Check leaf shard info's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // `(shard index, block number, first local segment index)` of the previously seen block
        let mut last_leaf_shard_info = None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|leaf_shard_block| {
            let shard_index = leaf_shard_block.header.prefix.shard_index;

            // Ensure increasing order of shard indices, block numbers and local segment indices
            if let Some((
                last_leaf_shard_index,
                last_leaf_shard_block_number,
                last_leaf_shard_first_local_segment_index,
            )) = last_leaf_shard_info
            {
                match last_leaf_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        // New shard index: start tracking it afresh
                        last_leaf_shard_info.replace((
                            shard_index,
                            leaf_shard_block.header.prefix.number,
                            leaf_shard_block
                                .segments
                                .as_ref()
                                .map(|segments| segments.own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Same shard: block numbers must be strictly increasing
                        if last_leaf_shard_block_number >= leaf_shard_block.header.prefix.number {
                            return false;
                        }
                        if let Some(leaf_shard_segments) = &leaf_shard_block.segments {
                            // First local segment indices must be strictly increasing too (only
                            // comparable when both blocks carry segments)
                            if let Some(last_leaf_shard_first_local_segment_index) =
                                last_leaf_shard_first_local_segment_index
                                && last_leaf_shard_first_local_segment_index
                                    >= leaf_shard_segments.own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_leaf_shard_info.replace((
                                shard_index,
                                leaf_shard_block.header.prefix.number,
                                Some(leaf_shard_segments.own_segments.first_local_segment_index),
                            ));
                        } else {
                            // No segments in this block: carry the last seen segment index forward
                            last_leaf_shard_info.replace((
                                shard_index,
                                leaf_shard_block.header.prefix.number,
                                last_leaf_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        // Shard indices went backwards
                        return false;
                    }
                }
            } else {
                // First block in the collection
                last_leaf_shard_info.replace((
                    shard_index,
                    leaf_shard_block.header.prefix.number,
                    leaf_shard_block
                        .segments
                        .as_ref()
                        .map(|segments| segments.own_segments.first_local_segment_index),
                ));
            }

            let Some(segments) = leaf_shard_block.segments else {
                return true;
            };

            // Prove that the own segments root is committed at leaf index 0 of the 2-leaf tree
            // under the header's body root
            BalancedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                array::from_ref(segments.segment_roots_proof),
                0,
                *segments.own_segments.root(),
            )
        })
    }

    /// Iterator over leaf shard blocks in a collection
    #[inline]
    pub fn iter(&self) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + 'a {
        // SAFETY: Checked in constructor
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        // Re-parse the region validated by the constructor; all `expect()`s below are justified
        // by that validation pass
        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            // TODO: Unchecked method would have been helpful here
            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segments = if num_own_segment_roots > 0 {
                let first_local_segment_index;
                // SAFETY: Checked in constructor
                (first_local_segment_index, remainder) =
                    unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
                // SAFETY: Correct alignment and size
                let first_local_segment_index =
                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };

                let segment_roots_proof;
                // SAFETY: Checked in constructor
                (segment_roots_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
                // SAFETY: Valid pointer and size, no alignment requirements
                let segment_roots_proof = unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; OUT_LEN]>()
                        .as_ref_unchecked()
                };

                let own_segment_roots;
                // SAFETY: Checked in constructor
                (own_segment_roots, remainder) = unsafe {
                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
                };
                // SAFETY: Valid pointer and size, no alignment requirements
                let own_segment_roots = unsafe {
                    slice::from_raw_parts(
                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                        num_own_segment_roots,
                    )
                };
                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

                Some(LeafShardOwnSegments {
                    segment_roots_proof,
                    own_segments: OwnSegments {
                        first_local_segment_index,
                        segment_roots: own_segment_roots,
                    },
                })
            } else {
                None
            };

            LeafShardBlockInfo { header, segments }
        })
    }

    /// Number of leaf shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no leaf shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

    /// Compute the root of segments of the leaf shard blocks info.
    ///
    /// Returns the default value for an empty collection of segment roots.
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        // One leaf per block; blocks without segments contribute the default leaf value
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                shard_block_info
                    .segments
                    .map(|own_segments| {
                        own_segments
                            .own_segments
                            .root_with_shard_index(shard_block_info.header.prefix.shard_index)
                    })
                    .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }

    /// Compute the root of headers of the leaf shard blocks info.
    ///
    /// Returns the default value for an empty collection of shard blocks.
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                // Hash the root again so we can prove it, otherwise root of headers is
                // indistinguishable from individual block roots and can be used to confuse
                // verifier
                single_block_hash(shard_block_info.header.root().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}
1289
/// Block body that corresponds to an intermediate shard
#[derive(Debug, Copy, Clone, Yokeable)]
// Prevent creation of potentially broken invariants externally
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    // All fields are private: instances are only produced by `Self::try_from_bytes()` /
    // `Self::try_from_bytes_unchecked()`, which establish the invariants over the borrowed bytes
    /// Segments produced by this shard
    own_segments: Option<OwnSegments<'a>>,
    /// Leaf shard blocks
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}
1300
impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent `IntermediateShardBody::to_owned()`; inherent methods take
        // precedence over trait methods during method resolution, so this does not recurse
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `IntermediateShardBody::root()`
        self.root()
    }
}
1318
impl<'a> IntermediateShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * leaf shard blocks: LeafShardBlocksInfo

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        // The first local segment index and segment roots are only serialized when there is at
        // least one own segment root
        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segments,
            leaf_shard_blocks,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // Nothing to check here
        true
    }

    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * leaf shard blocks: LeafShardBlocksInfo

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        // Unlike `Self::try_from_bytes()`, the unchecked variant of the nested parser is used here
        // and the final `is_internally_consistent()` call is skipped
        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        Some((
            Self {
                own_segments,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }

    /// Segment roots produced by this shard
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Leaf shard blocks
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedIntermediateShardBody::new(
                // Reconstruct `(local segment index, segment root)` pairs: indices are
                // consecutive starting from `first_local_segment_index`
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.leaf_shard_blocks.iter(),
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedIntermediateShardBody::new(iter::empty(), self.leaf_shard_blocks.iter())
                .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Compute block body root
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // Explicit nested trees to emphasize that the proof size for segments is just one hash
        // Inner tree: own segments root (or default when absent) + leaf shard segments root;
        // outer tree: inner root + leaf shard headers root
        let root = BalancedMerkleTree::compute_root_only(&[
            BalancedMerkleTree::compute_root_only(&[
                *self
                    .own_segments
                    .as_ref()
                    .map(OwnSegments::root)
                    .unwrap_or_default(),
                *self.leaf_shard_blocks.segments_root(),
            ]),
            *self.leaf_shard_blocks.headers_root(),
        ]);

        Blake3Hash::new(root)
    }
}
1492
/// Collection of transactions
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    // Number of transactions serialized in `bytes`
    num_transactions: usize,
    // Serialized transactions, each followed by zero padding to a 16-bytes boundary, starting at
    // a 16-bytes-aligned offset relative to the input buffer
    bytes: &'a [u8],
}
1499
1500impl<'a> Transactions<'a> {
1501    /// Create an instance from provided bytes.
1502    ///
1503    /// `bytes` do not need to be aligned.
1504    ///
1505    /// Returns an instance and remaining bytes on success.
1506    #[inline]
1507    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1508        // The layout here is as follows:
1509        // * number of transactions: u32 as unaligned little-endian bytes
1510        // * padding to 16-bytes boundary with zeroes
1511        // * for each transaction
1512        //   * transaction: Transaction
1513        //   * padding to 16-bytes boundary with zeroes
1514
1515        let num_transactions = bytes.split_off(..size_of::<u32>())?;
1516        let num_transactions = u32::from_le_bytes([
1517            num_transactions[0],
1518            num_transactions[1],
1519            num_transactions[2],
1520            num_transactions[3],
1521        ]) as usize;
1522
1523        let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
1524        let bytes_start = remainder;
1525
1526        for _ in 0..num_transactions {
1527            (_, remainder) = Transaction::try_from_bytes(bytes)?;
1528            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
1529        }
1530
1531        Some((
1532            Self {
1533                num_transactions,
1534                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
1535            },
1536            remainder,
1537        ))
1538    }
1539
1540    /// Iterator over transactions in a collection
1541    #[inline]
1542    pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + 'a {
1543        let mut remainder = self.bytes;
1544
1545        (0..self.num_transactions).map(move |_| {
1546            // SAFETY: Checked in constructor
1547            let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };
1548
1549            remainder = &remainder[transaction.encoded_size()..];
1550            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
1551                .expect("Already checked in constructor; qed");
1552
1553            transaction
1554        })
1555    }
1556
1557    /// Number of transactions
1558    #[inline(always)]
1559    pub const fn len(&self) -> usize {
1560        self.num_transactions
1561    }
1562
1563    /// Returns `true` if there are no transactions
1564    #[inline(always)]
1565    pub const fn is_empty(&self) -> bool {
1566        self.num_transactions == 0
1567    }
1568
1569    /// Compute the root of transactions.
1570    ///
1571    /// Returns the default value for an empty collection of transactions.
1572    #[inline]
1573    pub fn root(&self) -> Blake3Hash {
1574        let root = UnbalancedMerkleTree::compute_root_only::<{ u32::MAX as u64 }, _, _>(
1575            self.iter().map(|transaction| {
1576                // Hash the hash again so we can prove it, otherwise root of transactions is
1577                // indistinguishable from individual transaction roots and can be used to
1578                // confuse verifier
1579                single_block_hash(transaction.hash().as_ref())
1580                    .expect("Less than a single block worth of bytes; qed")
1581            }),
1582        )
1583        .unwrap_or_default();
1584
1585        Blake3Hash::new(root)
1586    }
1587}
1588
/// Block body that corresponds to a leaf shard
///
/// Instances are created by parsing encoded bytes with [`LeafShardBody::try_from_bytes()`].
#[derive(Debug, Copy, Clone, Yokeable)]
// Prevent creation of potentially broken invariants externally
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    /// Segments produced by this shard
    ///
    /// `None` when this block contains no own segment roots
    own_segments: Option<OwnSegments<'a>>,
    /// User transactions
    transactions: Transactions<'a>,
}
1599
1600impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
1601    const SHARD_KIND: RealShardKind = RealShardKind::LeafShard;
1602
1603    #[cfg(feature = "alloc")]
1604    type Owned = OwnedLeafShardBody;
1605
1606    #[cfg(feature = "alloc")]
1607    #[inline(always)]
1608    fn to_owned(self) -> Self::Owned {
1609        self.to_owned()
1610    }
1611
1612    #[inline(always)]
1613    fn root(&self) -> Blake3Hash {
1614        self.root()
1615    }
1616}
1617
1618impl<'a> LeafShardBody<'a> {
1619    /// Create an instance from provided bytes.
1620    ///
1621    /// `bytes` do not need to be aligned.
1622    ///
1623    /// Returns an instance and remaining bytes on success.
1624    #[inline]
1625    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1626        // The layout here is as follows:
1627        // * number of own segment roots: u8
1628        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
1629        // * concatenated own segment roots
1630        // * transactions: Transactions
1631
1632        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
1633        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
1634
1635        let own_segments = if num_own_segment_roots > 0 {
1636            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
1637            // SAFETY: Unaligned and correct size
1638            let first_local_segment_index = unsafe {
1639                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
1640            }
1641            .as_inner();
1642
1643            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1644            // SAFETY: Valid pointer and size, no alignment requirements
1645            let own_segment_roots = unsafe {
1646                slice::from_raw_parts(
1647                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1648                    num_own_segment_roots,
1649                )
1650            };
1651            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1652
1653            Some(OwnSegments {
1654                first_local_segment_index,
1655                segment_roots: own_segment_roots,
1656            })
1657        } else {
1658            None
1659        };
1660
1661        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;
1662
1663        let body = Self {
1664            own_segments,
1665            transactions,
1666        };
1667
1668        if !body.is_internally_consistent() {
1669            return None;
1670        }
1671
1672        Some((body, remainder))
1673    }
1674
1675    /// Check block body's internal consistency.
1676    ///
1677    /// This is usually not necessary to be called explicitly since internal consistency is checked
1678    /// by [`Self::try_from_bytes()`] internally.
1679    #[inline]
1680    pub fn is_internally_consistent(&self) -> bool {
1681        // Nothing to check here
1682        true
1683    }
1684
1685    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
1686    /// checks
1687    #[inline]
1688    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1689        // The layout here is as follows:
1690        // * number of own segment roots: u8
1691        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
1692        // * concatenated own segment roots
1693        // * transactions: Transactions
1694
1695        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
1696        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
1697
1698        let own_segments = if num_own_segment_roots > 0 {
1699            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
1700            // SAFETY: Unaligned and correct size
1701            let first_local_segment_index = unsafe {
1702                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
1703            }
1704            .as_inner();
1705
1706            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1707            // SAFETY: Valid pointer and size, no alignment requirements
1708            let own_segment_roots = unsafe {
1709                slice::from_raw_parts(
1710                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1711                    num_own_segment_roots,
1712                )
1713            };
1714            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1715
1716            Some(OwnSegments {
1717                first_local_segment_index,
1718                segment_roots: own_segment_roots,
1719            })
1720        } else {
1721            None
1722        };
1723
1724        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;
1725
1726        Some((
1727            Self {
1728                own_segments,
1729                transactions,
1730            },
1731            remainder,
1732        ))
1733    }
1734
1735    /// Segment roots produced by this shard
1736    #[inline(always)]
1737    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
1738        self.own_segments
1739    }
1740
1741    /// User transactions
1742    #[inline(always)]
1743    pub fn transactions(&self) -> &Transactions<'a> {
1744        &self.transactions
1745    }
1746
1747    /// Create an owned version of this body
1748    #[cfg(feature = "alloc")]
1749    #[inline(always)]
1750    pub fn to_owned(self) -> OwnedLeafShardBody {
1751        let mut builder = if let Some(own_segments) = self.own_segments {
1752            let first_local_segment_index = own_segments.first_local_segment_index;
1753
1754            OwnedLeafShardBody::init(own_segments.segment_roots.iter().copied().enumerate().map(
1755                |(index, segment_root)| {
1756                    (
1757                        first_local_segment_index + LocalSegmentIndex::from(index as u64),
1758                        segment_root,
1759                    )
1760                },
1761            ))
1762            .expect("`self` is always a valid invariant; qed")
1763        } else {
1764            OwnedLeafShardBody::init(iter::empty())
1765                .expect("`self` is always a valid invariant; qed")
1766        };
1767        for transaction in self.transactions.iter() {
1768            builder
1769                .add_transaction(transaction)
1770                .expect("`self` is always a valid invariant; qed");
1771        }
1772
1773        builder.finish()
1774    }
1775
1776    /// Compute block body root
1777    #[inline]
1778    pub fn root(&self) -> Blake3Hash {
1779        let root = BalancedMerkleTree::compute_root_only(&[
1780            *self
1781                .own_segments
1782                .as_ref()
1783                .map(OwnSegments::root)
1784                .unwrap_or_default(),
1785            *self.transactions.root(),
1786        ]);
1787
1788        Blake3Hash::new(root)
1789    }
1790}
1791
/// Block body that together with [`BlockHeader`] form a [`Block`]
///
/// The `From` derive allows constructing this enum directly from any of the concrete body types.
///
/// [`BlockHeader`]: crate::block::header::BlockHeader
/// [`Block`]: crate::block::Block
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Block body corresponds to the beacon chain
    BeaconChain(BeaconChainBody<'a>),
    /// Block body corresponds to an intermediate shard
    IntermediateShard(IntermediateShardBody<'a>),
    /// Block body corresponds to a leaf shard
    LeafShard(LeafShardBody<'a>),
}
1805
1806impl<'a> BlockBody<'a> {
1807    /// Try to create a new instance from provided bytes for the provided shard index.
1808    ///
1809    /// `bytes` do not need to be aligned.
1810    ///
1811    /// Returns an instance and remaining bytes on success, `None` if too few bytes were given,
1812    /// bytes are not properly aligned or input is otherwise invalid.
1813    #[inline]
1814    pub fn try_from_bytes(bytes: &'a [u8], shard_kind: RealShardKind) -> Option<(Self, &'a [u8])> {
1815        match shard_kind {
1816            RealShardKind::BeaconChain => {
1817                let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1818                Some((Self::BeaconChain(body), remainder))
1819            }
1820            RealShardKind::IntermediateShard => {
1821                let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1822                Some((Self::IntermediateShard(body), remainder))
1823            }
1824            RealShardKind::LeafShard => {
1825                let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1826                Some((Self::LeafShard(body), remainder))
1827            }
1828        }
1829    }
1830
1831    /// Check block body's internal consistency.
1832    ///
1833    /// This is usually not necessary to be called explicitly since internal consistency is checked
1834    /// by [`Self::try_from_bytes()`] internally.
1835    #[inline]
1836    pub fn is_internally_consistent(&self) -> bool {
1837        match self {
1838            Self::BeaconChain(body) => body.is_internally_consistent(),
1839            Self::IntermediateShard(body) => body.is_internally_consistent(),
1840            Self::LeafShard(body) => body.is_internally_consistent(),
1841        }
1842    }
1843
1844    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
1845    /// checks
1846    #[inline]
1847    pub fn try_from_bytes_unchecked(
1848        bytes: &'a [u8],
1849        shard_kind: RealShardKind,
1850    ) -> Option<(Self, &'a [u8])> {
1851        match shard_kind {
1852            RealShardKind::BeaconChain => {
1853                let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1854                Some((Self::BeaconChain(body), remainder))
1855            }
1856            RealShardKind::IntermediateShard => {
1857                let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1858                Some((Self::IntermediateShard(body), remainder))
1859            }
1860            RealShardKind::LeafShard => {
1861                let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1862                Some((Self::LeafShard(body), remainder))
1863            }
1864        }
1865    }
1866
1867    /// Create an owned version of this body
1868    #[cfg(feature = "alloc")]
1869    #[inline(always)]
1870    pub fn to_owned(self) -> OwnedBlockBody {
1871        match self {
1872            Self::BeaconChain(body) => body.to_owned().into(),
1873            Self::IntermediateShard(body) => body.to_owned().into(),
1874            Self::LeafShard(body) => body.to_owned().into(),
1875        }
1876    }
1877
1878    /// Compute block body root.
1879    ///
1880    /// Block body hash is actually a Merkle Tree Root. The leaves are derived from individual
1881    /// fields this enum in the declaration order.
1882    #[inline]
1883    pub fn root(&self) -> Blake3Hash {
1884        match self {
1885            Self::BeaconChain(body) => body.root(),
1886            Self::IntermediateShard(body) => body.root(),
1887            Self::LeafShard(body) => body.root(),
1888        }
1889    }
1890}