1#[cfg(feature = "alloc")]
4pub mod owned;
5
6#[cfg(feature = "alloc")]
7use crate::block::body::owned::{
8 GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
9 OwnedLeafShardBody,
10};
11use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
12use crate::block::{BlockNumber, align_to_and_ensure_zero_padding};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::{LocalSegmentIndex, SegmentRoot};
16use crate::shard::{RealShardKind, ShardIndex};
17use crate::transaction::Transaction;
18use ab_blake3::{BLOCK_LEN, OUT_LEN, single_block_hash};
19use ab_io_type::trivial_type::TrivialType;
20use ab_io_type::unaligned::Unaligned;
21use ab_merkle_tree::balanced::BalancedMerkleTree;
22use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
23#[cfg(feature = "alloc")]
24use core::iter;
25use core::iter::TrustedLen;
26use core::{array, cmp, fmt, slice};
27use derive_more::From;
28use yoke::Yokeable;
29
/// Generic block body.
///
/// Implemented in this module by [`BeaconChainBody`] and [`IntermediateShardBody`] so that
/// generic code can abstract over the concrete shard kind.
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind of the shard this body belongs to
    const SHARD_KIND: RealShardKind;

    /// Owned version of this block body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Turn this body into its owned version
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute the root of this block body
    fn root(&self) -> Blake3Hash;
}
51
52#[inline]
54pub fn compute_segments_root<'a, const MAX_SEGMENTS: u64, Iter>(segment_roots: Iter) -> Blake3Hash
55where
56 [(); MAX_SEGMENTS.next_power_of_two().ilog2() as usize + 1]:,
57 Iter: IntoIterator<Item = &'a SegmentRoot>,
58{
59 let root = UnbalancedMerkleTree::compute_root_only::<MAX_SEGMENTS, _, _>(
61 segment_roots.into_iter().map(|segment_root| {
62 single_block_hash(segment_root.as_ref())
65 .expect("Less than a single block worth of bytes; qed")
66 }),
67 );
68
69 Blake3Hash::new(root.unwrap_or_default())
70}
71
/// Segments that belong to the shard itself ("own" segments), as stored in a block body
#[derive(Debug, Copy, Clone)]
pub struct OwnSegments<'a> {
    /// Local segment index of the first entry in `segment_roots`
    pub first_local_segment_index: LocalSegmentIndex,
    /// Segment roots, consecutive starting at `first_local_segment_index`
    pub segment_roots: &'a [SegmentRoot],
}
80
impl OwnSegments<'_> {
    /// Compute the Merkle root committing to these own segments.
    ///
    /// A 2-leaf balanced tree over the hashed first local segment index and the combined root
    /// of all segment roots.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            // Leaf 0: hash of the first local segment index
            single_block_hash(self.first_local_segment_index.as_bytes())
                .expect("Less than a single block worth of bytes; qed"),
            // Leaf 1: combined root of up to `u8::MAX` segment roots
            *compute_segments_root::<{ u8::MAX as u64 }, _>(self.segment_roots),
        ]);

        Blake3Hash::new(root)
    }

    /// Like [`Self::root()`], but the first leaf additionally commits to the provided shard
    /// index by hashing it together with the first local segment index.
    #[inline]
    pub fn root_with_shard_index(&self, shard_index: ShardIndex) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            {
                // Both values concatenated must fit into one BLAKE3 block for
                // `single_block_hash()` to succeed
                const {
                    assert!((ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize <= BLOCK_LEN);
                }
                // Concatenate shard index bytes followed by first local segment index bytes
                let mut pair = [0u8; (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize];
                pair[..ShardIndex::SIZE as usize].copy_from_slice(shard_index.as_bytes());
                pair[ShardIndex::SIZE as usize..]
                    .copy_from_slice(self.first_local_segment_index.as_bytes());

                single_block_hash(&pair).expect("Less than a single block worth of bytes; qed")
            },
            *compute_segments_root::<{ u8::MAX as u64 }, _>(self.segment_roots),
        ]);

        Blake3Hash::new(root)
    }
}
121
/// Information about an intermediate shard block as stored in a beacon chain block body
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Intermediate shard block header
    pub header: IntermediateShardHeader<'a>,
    /// Proof linking the segment roots to the header's body root (present iff any segments
    /// exist, see `IntermediateShardBlocksInfo::is_internally_consistent()`)
    pub segments_proof: Option<&'a [u8; OUT_LEN]>,
    /// Segments owned by the intermediate shard itself, if any
    pub own_segments: Option<OwnSegments<'a>>,
    // Number of leaf shard blocks that contributed segments (prefix counters of the bytes below)
    num_leaf_shard_blocks_with_segments: u8,
    // Compact encoding of leaf shard segments, decoded by `leaf_shards_segments()`
    leaf_shards_segments_bytes: &'a [u8],
}
134
impl<'a> IntermediateShardBlockInfo<'a> {
    /// Iterator over leaf shard segments in this intermediate shard block.
    ///
    /// Layout of `leaf_shards_segments_bytes`: one `u8` segment root count per leaf shard block,
    /// followed by per-entry `ShardIndex`, `LocalSegmentIndex` and segment roots. The unchecked
    /// splits rely on validation done by the `IntermediateShardBlocksInfo` constructors, which
    /// are the only producers of these bytes.
    #[inline]
    pub fn leaf_shards_segments(
        &self,
    ) -> impl ExactSizeIterator<Item = (ShardIndex, OwnSegments<'a>)> + TrustedLen + use<'a> {
        // SAFETY: sizes were validated by the `IntermediateShardBlocksInfo` constructor
        let (counts, mut remainder) = unsafe {
            self.leaf_shards_segments_bytes.split_at_unchecked(
                usize::from(self.num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
            )
        };
        counts.iter().map(move |&num_own_segment_roots| {
            let num_own_segment_roots = usize::from(num_own_segment_roots);

            let shard_index;
            // SAFETY: length was validated by the constructor
            (shard_index, remainder) =
                unsafe { remainder.split_at_unchecked(ShardIndex::SIZE as usize) };
            // SAFETY: exactly `ShardIndex::SIZE` bytes, unaligned read
            let shard_index =
                unsafe { Unaligned::<ShardIndex>::from_bytes_unchecked(shard_index) }.as_inner();

            let first_local_segment_index;
            // SAFETY: length was validated by the constructor
            (first_local_segment_index, remainder) =
                unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
            // SAFETY: exactly `LocalSegmentIndex::SIZE` bytes, unaligned read
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots;
            // SAFETY: length was validated by the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: correct length, alignment of `u8` is trivially satisfied
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            (
                shard_index,
                OwnSegments {
                    first_local_segment_index,
                    segment_roots: own_segment_roots,
                },
            )
        })
    }
}
191
/// Information about intermediate shard blocks stored in a beacon chain block body
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of encoded blocks
    num_blocks: usize,
    // Underlying bytes: per-block counters followed by padded per-block records; layout is
    // validated by the checked constructors
    bytes: &'a [u8],
}
198
199impl<'a> IntermediateShardBlocksInfo<'a> {
200 #[inline]
206 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
207 let num_blocks = bytes.split_off(..size_of::<u16>())?;
227 let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));
228
229 let bytes_start = bytes;
230
231 let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;
232
233 let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
234
235 for _ in 0..num_blocks {
236 let num_own_segment_roots = usize::from(counts[0]);
237 let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
238 counts = &counts[2..];
239
240 (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
241
242 if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
243 let _segments_proof = remainder.split_off(..SegmentRoot::SIZE)?;
244 }
245
246 if num_own_segment_roots > 0 {
247 let _first_local_segment_index =
248 remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
249 let _own_segment_roots =
250 remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
251 }
252
253 let leaf_shard_segment_roots_counts =
254 remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;
255
256 for &num_own_segment_roots in leaf_shard_segment_roots_counts {
257 let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
258 let _first_local_segment_index =
259 remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
260 let _own_segment_roots = remainder
261 .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
262 }
263
264 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
265 }
266
267 let info = Self {
268 num_blocks,
269 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
270 };
271
272 if !info.is_internally_consistent() {
273 return None;
274 }
275
276 Some((info, remainder))
277 }
278
    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], but skips the
    /// [`Self::is_internally_consistent()`] check; sizes and encoding are still validated.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of blocks is a little-endian `u16` prefix
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // Two `u8` counters per block: number of own segment roots and number of leaf shard
        // blocks with segments
        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
            counts = &counts[2..];

            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;

            // Segments proof is present iff there are any segments at all
            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
                let _segments_proof = remainder.split_off(..OUT_LEN)?;
            }

            // Own segments of the intermediate shard itself
            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }

            // One `u8` segment root counter per leaf shard block with segments, followed by the
            // per-entry data
            let leaf_shard_segment_roots_counts =
                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;

            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots = remainder
                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
            }

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
        }

        Some((
            Self {
                num_blocks,
                // Store exactly the bytes covered by the parsed blocks
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }
351
    /// Check internal consistency of the intermediate shard blocks.
    ///
    /// Verifies that:
    /// * blocks are ordered by shard index; within the same shard, block numbers and first
    ///   local segment indices are strictly increasing
    /// * leaf shard entries belong to the intermediate shard and are similarly ordered
    /// * a segments proof is absent when there are no segments, and otherwise links the
    ///   own/leaf segment roots to the body root in the header
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // Last seen (shard index, block number, first local segment index) for ordering checks
        let mut last_intermediate_shard_info =
            None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|intermediate_shard_block| {
            let shard_index = intermediate_shard_block.header.prefix.shard_index;

            if let Some((
                last_intermediate_shard_index,
                last_intermediate_shard_block_number,
                last_intermediate_shard_first_local_segment_index,
            )) = last_intermediate_shard_info
            {
                match last_intermediate_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        // New shard, reset tracking info
                        last_intermediate_shard_info.replace((
                            shard_index,
                            intermediate_shard_block.header.prefix.number,
                            intermediate_shard_block
                                .own_segments
                                .as_ref()
                                .map(|own_segments| own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Same shard: block numbers must be strictly increasing
                        if last_intermediate_shard_block_number
                            >= intermediate_shard_block.header.prefix.number
                        {
                            return false;
                        }
                        if let Some(own_segments) = &intermediate_shard_block.own_segments {
                            // Segment indices must be strictly increasing as well
                            if let Some(last_intermediate_shard_first_local_segment_index) =
                                last_intermediate_shard_first_local_segment_index
                                && last_intermediate_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                Some(own_segments.first_local_segment_index),
                            ));
                        } else {
                            // No own segments in this block, retain the previous segment index
                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                last_intermediate_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        // Blocks out of shard index order
                        return false;
                    }
                }
            } else {
                // First block, just remember it
                last_intermediate_shard_info.replace((
                    shard_index,
                    intermediate_shard_block.header.prefix.number,
                    intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(|own_segments| own_segments.first_local_segment_index),
                ));
            }

            // Same kind of ordering checks for the leaf shard entries of this block
            let mut last_leaf_shard_info = None::<(ShardIndex, LocalSegmentIndex)>;

            if !intermediate_shard_block.leaf_shards_segments().all(
                |(leaf_shard_index, own_segments)| {
                    // Leaf shard must actually belong to this intermediate shard
                    if !leaf_shard_index.is_child_of(shard_index) {
                        return false;
                    }

                    if let Some((
                        last_leaf_shard_index,
                        last_leaf_shard_first_local_segment_index,
                    )) = last_leaf_shard_info
                    {
                        match last_leaf_shard_index.cmp(&leaf_shard_index) {
                            cmp::Ordering::Less => {
                                // Strictly increasing shard index is fine
                            }
                            cmp::Ordering::Equal => {
                                // Same leaf shard: segment indices must be strictly increasing
                                if last_leaf_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                                {
                                    return false;
                                }
                            }
                            cmp::Ordering::Greater => {
                                // Entries out of shard index order
                                return false;
                            }
                        }
                    }
                    last_leaf_shard_info
                        .replace((leaf_shard_index, own_segments.first_local_segment_index));

                    true
                },
            ) {
                return false;
            }

            // No segments at all means there must be no segments proof either
            if intermediate_shard_block.own_segments.is_none()
                && intermediate_shard_block
                    .leaf_shards_segments()
                    .size_hint()
                    .0
                    == 0
            {
                return intermediate_shard_block.segments_proof.is_none();
            }

            let segments_proof = intermediate_shard_block.segments_proof.unwrap_or(&[0; _]);

            // Documents that the literal below is `u16::MAX`
            #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
            #[expect(clippy::eq_op, reason = "Intentional documentation")]
            const {
                assert!(u16::MAX == 65535);
            }
            let leaf_shards_root = UnbalancedMerkleTree::compute_root_only::<65535, _, _>(
                intermediate_shard_block.leaf_shards_segments().map(
                    |(shard_index, own_segments)| own_segments.root_with_shard_index(shard_index),
                ),
            )
            .unwrap_or_default();

            // The proof must link the own/leaf segment roots to the body root in the header
            BalancedMerkleTree::<2>::verify(
                &intermediate_shard_block.header.result.body_root,
                array::from_ref(segments_proof),
                0,
                BalancedMerkleTree::compute_root_only(&[
                    *intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .unwrap_or_default(),
                    leaf_shards_root,
                ]),
            )
        })
    }
506
507 #[inline]
509 pub fn iter(
510 &self,
511 ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + use<'a> {
512 let (mut counts, mut remainder) = unsafe {
514 self.bytes
515 .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
516 };
517
518 (0..self.num_blocks).map(move |_| {
519 let num_own_segment_roots = usize::from(counts[0]);
520 let num_leaf_shard_blocks_with_segments = counts[1];
521 counts = &counts[2..];
522
523 let header;
525 (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
526 .expect("Already checked in constructor; qed");
527
528 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
529 .expect("Already checked in constructor; qed");
530
531 let segments_proof =
532 if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
533 let segments_proof;
534 (segments_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
536 Some(unsafe {
538 segments_proof
539 .as_ptr()
540 .cast::<[u8; OUT_LEN]>()
541 .as_ref_unchecked()
542 })
543 } else {
544 None
545 };
546
547 let own_segments = if num_own_segment_roots > 0 {
548 let first_local_segment_index;
549 (first_local_segment_index, remainder) =
551 unsafe { remainder.split_at_unchecked(size_of::<LocalSegmentIndex>()) };
552 let first_local_segment_index =
554 *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };
555
556 let own_segment_roots;
557 (own_segment_roots, remainder) = unsafe {
559 remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
560 };
561 let own_segment_roots = unsafe {
563 slice::from_raw_parts(
564 own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
565 num_own_segment_roots,
566 )
567 };
568 let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
569
570 Some(OwnSegments {
571 first_local_segment_index,
572 segment_roots: own_segment_roots,
573 })
574 } else {
575 None
576 };
577
578 let leaf_shard_segment_roots_counts = unsafe {
580 remainder.get_unchecked(
581 ..usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
582 )
583 };
584
585 let leaf_shards_segments_bytes;
586 (leaf_shards_segments_bytes, remainder) = unsafe {
588 remainder.split_at_unchecked(
589 usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>()
590 + usize::from(num_leaf_shard_blocks_with_segments)
591 * (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize
592 + leaf_shard_segment_roots_counts.iter().fold(
593 0_usize,
594 |acc, &num_own_segment_roots| {
595 acc + usize::from(num_own_segment_roots) * SegmentRoot::SIZE
596 },
597 ),
598 )
599 };
600
601 remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
602 .expect("Already checked in constructor; qed");
603
604 IntermediateShardBlockInfo {
605 header,
606 segments_proof,
607 own_segments,
608 num_leaf_shard_blocks_with_segments,
609 leaf_shards_segments_bytes,
610 }
611 })
612 }
613
    /// Number of intermediate shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }
619
    /// Returns `true` if there are no intermediate shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }
625
    /// Compute the combined Merkle root of the segments of the intermediate shard blocks
    /// (own segments of each block followed by its leaf shards' segments)
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                // Documents that the literal below is `u32::MAX`
                #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
                #[expect(clippy::eq_op, reason = "Intentional documentation")]
                const {
                    assert!(u32::MAX == 4294967295);
                }
                // Per-block root: own segments root (if any) followed by per-leaf-shard roots
                UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
                    shard_block_info
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .into_iter()
                        .chain(shard_block_info.leaf_shards_segments().map(
                            |(shard_index, own_segments)| {
                                own_segments.root_with_shard_index(shard_index)
                            },
                        )),
                )
                .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
660
661 #[inline]
665 pub fn headers_root(&self) -> Blake3Hash {
666 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
667 self.iter().map(|shard_block_info| {
669 single_block_hash(shard_block_info.header.root().as_ref())
673 .expect("Less than a single block worth of bytes; qed")
674 }),
675 )
676 .unwrap_or_default();
677
678 Blake3Hash::new(root)
679 }
680}
681
/// Body of a beacon chain block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    // Segments owned by the beacon chain itself, if any
    own_segments: Option<OwnSegments<'a>>,
    // Information about included intermediate shard blocks
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    // Proof-of-time checkpoints included in this body
    pot_checkpoints: &'a [PotCheckpoints],
}
695
impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::BeaconChain;

    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    // Forwards to the inherent `BeaconChainBody::to_owned()` method
    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        self.to_owned()
    }

    // Forwards to the inherent `BeaconChainBody::root()` method
    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        self.root()
    }
}
713
714impl<'a> BeaconChainBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Returns an instance and remaining bytes on success, `None` if the encoding is invalid.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of PoT checkpoints is a `u32` prefix
        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: `u32` is a trivial type for which any bit pattern is valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        // Number of own segment roots is a `u8` prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: exactly `LocalSegmentIndex::SIZE` bytes were just split off; unaligned
            // read
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: correct length, alignment of `u8` is trivially satisfied
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        // Intermediate shard blocks verify their own internal consistency here
        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: correct length, alignment of `u8` is trivially satisfied
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        let body = Self {
            own_segments,
            intermediate_shard_blocks,
            pot_checkpoints,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }
791
    /// Check whether the body is internally consistent.
    ///
    /// Always `true` here: the nested [`IntermediateShardBlocksInfo`] already verifies its own
    /// consistency in its checked constructor.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }
801
    /// Create an instance from provided bytes without checking internal consistency of nested
    /// structures (uses [`IntermediateShardBlocksInfo::try_from_bytes_unchecked()`]); sizes and
    /// encoding are still validated.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of PoT checkpoints is a `u32` prefix
        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: `u32` is a trivial type for which any bit pattern is valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        // Number of own segment roots is a `u8` prefix
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: exactly `LocalSegmentIndex::SIZE` bytes were just split off; unaligned
            // read
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: correct length, alignment of `u8` is trivially satisfied
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: correct length, alignment of `u8` is trivially satisfied
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        Some((
            Self {
                own_segments,
                intermediate_shard_blocks,
                pot_checkpoints,
            },
            remainder,
        ))
    }
872
    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBeaconChainBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedBeaconChainBody::new(
                // Segment roots are stored consecutively; re-derive each one's local segment
                // index from the first
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedBeaconChainBody::new(
                iter::empty(),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        }
    }
902
    /// Segments owned by the beacon chain itself, if any
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }
908
    /// Information about the included intermediate shard blocks
    #[inline(always)]
    pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
        &self.intermediate_shard_blocks
    }
914
    /// Proof-of-time checkpoints included in this body
    #[inline(always)]
    pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
        self.pot_checkpoints
    }
921
922 #[inline]
924 pub fn root(&self) -> Blake3Hash {
925 let root = BalancedMerkleTree::compute_root_only(&[
927 *self
928 .own_segments
929 .as_ref()
930 .map(OwnSegments::root)
931 .unwrap_or_default(),
932 *self.intermediate_shard_blocks.segments_root(),
933 *self.intermediate_shard_blocks.headers_root(),
934 blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
935 .into(),
936 ]);
937
938 Blake3Hash::new(root)
939 }
940}
941
/// Own segments of a leaf shard together with the proof linking them to the leaf shard block
#[derive(Debug, Copy, Clone)]
pub struct LeafShardOwnSegments<'a> {
    /// Proof linking the own segments root to the body root in the leaf shard block header
    pub segment_roots_proof: &'a [u8; OUT_LEN],
    /// The own segments themselves
    pub own_segments: OwnSegments<'a>,
}
950
/// Information about a leaf shard block as stored in an intermediate shard block body
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Leaf shard block header
    pub header: LeafShardHeader<'a>,
    /// Own segments with the corresponding proof, if any
    pub segments: Option<LeafShardOwnSegments<'a>>,
}
959
/// Information about leaf shard blocks stored in an intermediate shard block body
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of encoded blocks
    num_blocks: usize,
    // Underlying bytes: per-block counters followed by padded per-block records; layout is
    // validated by the checked constructors
    bytes: &'a [u8],
}
966
967impl<'a> LeafShardBlocksInfo<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Returns an instance and remaining bytes on success, `None` if the encoding is invalid or
    /// the contents are not internally consistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of blocks is a little-endian `u16` prefix
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // One `u8` counter (number of own segment roots) per block
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // Segments (index, proof, roots) only present when there are own segment roots
            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }
        }

        let info = Self {
            num_blocks,
            // Store exactly the bytes covered by the parsed blocks
            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
        };

        if !info.is_internally_consistent() {
            return None;
        }

        Some((info, remainder))
    }
1024
    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], but skips the
    /// [`Self::is_internally_consistent()`] check; sizes and encoding are still validated.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // Number of blocks is a little-endian `u16` prefix
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        // One `u8` counter (number of own segment roots) per block
        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            // Segments (index, proof, roots) only present when there are own segment roots
            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }
        }

        Some((
            Self {
                num_blocks,
                // Store exactly the bytes covered by the parsed blocks
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }
1075
    /// Check internal consistency of the leaf shard blocks.
    ///
    /// Verifies ordering (by shard index; within the same shard, strictly increasing block
    /// numbers and first local segment indices) and that each segments proof links the own
    /// segments root to the body root in the header.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // Last seen (shard index, block number, first local segment index) for ordering checks
        let mut last_leaf_shard_info = None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|leaf_shard_block| {
            let shard_index = leaf_shard_block.header.prefix.shard_index;

            if let Some((
                last_leaf_shard_index,
                last_leaf_shard_block_number,
                last_leaf_shard_first_local_segment_index,
            )) = last_leaf_shard_info
            {
                match last_leaf_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        // New shard, reset tracking info
                        last_leaf_shard_info.replace((
                            shard_index,
                            leaf_shard_block.header.prefix.number,
                            leaf_shard_block
                                .segments
                                .as_ref()
                                .map(|segments| segments.own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Same shard: block numbers must be strictly increasing
                        if last_leaf_shard_block_number >= leaf_shard_block.header.prefix.number {
                            return false;
                        }
                        if let Some(leaf_shard_segments) = &leaf_shard_block.segments {
                            // Segment indices must be strictly increasing as well
                            if let Some(last_leaf_shard_first_local_segment_index) =
                                last_leaf_shard_first_local_segment_index
                                && last_leaf_shard_first_local_segment_index
                                    >= leaf_shard_segments.own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_leaf_shard_info.replace((
                                shard_index,
                                leaf_shard_block.header.prefix.number,
                                Some(leaf_shard_segments.own_segments.first_local_segment_index),
                            ));
                        } else {
                            // No segments in this block, retain the previous segment index
                            last_leaf_shard_info.replace((
                                shard_index,
                                leaf_shard_block.header.prefix.number,
                                last_leaf_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        // Blocks out of shard index order
                        return false;
                    }
                }
            } else {
                // First block, just remember it
                last_leaf_shard_info.replace((
                    shard_index,
                    leaf_shard_block.header.prefix.number,
                    leaf_shard_block
                        .segments
                        .as_ref()
                        .map(|segments| segments.own_segments.first_local_segment_index),
                ));
            }

            // Blocks without segments have nothing more to verify
            let Some(segments) = leaf_shard_block.segments else {
                return true;
            };

            // The proof must link the own segments root to the body root in the header
            BalancedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                array::from_ref(segments.segment_roots_proof),
                0,
                *segments.own_segments.root(),
            )
        })
    }
1158
    /// Iterator over leaf shard blocks in the body.
    ///
    /// The unchecked splits below rely on validation performed by the checked constructors,
    /// which are the only way to obtain an instance of this struct.
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + use<'a> {
        // One `u8` counter (number of own segment roots) per block
        // SAFETY: the constructor verified that this many bytes exist
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segments = if num_own_segment_roots > 0 {
                let first_local_segment_index;
                // SAFETY: length was checked in the constructor
                (first_local_segment_index, remainder) =
                    unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
                // SAFETY: exactly the right number of bytes was just split off
                let first_local_segment_index =
                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };

                let segment_roots_proof;
                // SAFETY: length was checked in the constructor
                (segment_roots_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
                // SAFETY: slice is exactly `OUT_LEN` bytes long
                let segment_roots_proof = unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; OUT_LEN]>()
                        .as_ref_unchecked()
                };

                let own_segment_roots;
                // SAFETY: length was checked in the constructor
                (own_segment_roots, remainder) = unsafe {
                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
                };
                // SAFETY: correct length, alignment of `u8` is trivially satisfied
                let own_segment_roots = unsafe {
                    slice::from_raw_parts(
                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                        num_own_segment_roots,
                    )
                };
                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

                Some(LeafShardOwnSegments {
                    segment_roots_proof,
                    own_segments: OwnSegments {
                        first_local_segment_index,
                        segment_roots: own_segment_roots,
                    },
                })
            } else {
                None
            };

            LeafShardBlockInfo { header, segments }
        })
    }
1230
    /// Number of leaf shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }
1236
    /// Returns `true` if there are no leaf shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }
1242
    /// Compute the combined Merkle root of the segments of the leaf shard blocks
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                // Blocks without segments contribute the default (all-zero) hash
                shard_block_info
                    .segments
                    .map(|own_segments| {
                        own_segments
                            .own_segments
                            .root_with_shard_index(shard_block_info.header.prefix.shard_index)
                    })
                    .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
1264
1265 #[inline]
1269 pub fn headers_root(&self) -> Blake3Hash {
1270 let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
1271 self.iter().map(|shard_block_info| {
1272 single_block_hash(shard_block_info.header.root().as_ref())
1276 .expect("Less than a single block worth of bytes; qed")
1277 }),
1278 )
1279 .unwrap_or_default();
1280
1281 Blake3Hash::new(root)
1282 }
1283}
1284
/// Body of an intermediate shard block
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    // Segments owned by the intermediate shard itself, if any
    own_segments: Option<OwnSegments<'a>>,
    // Information about included leaf shard blocks
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}
1295
impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    // Forwards to the inherent `IntermediateShardBody::to_owned()` method
    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        self.to_owned()
    }

    // Forwards to the inherent `IntermediateShardBody::root()` method
    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        self.root()
    }
}
1313
impl<'a> IntermediateShardBody<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Layout read here: one `u8` count of own segment roots, then (when the count
    /// is non-zero) a `LocalSegmentIndex` and that many `SegmentRoot`s, followed by
    /// leaf shard block infos. Internal consistency of the result is checked via
    /// [`Self::is_internally_consistent()`] before returning.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Just took exactly `LocalSegmentIndex::SIZE` bytes, and
            // `Unaligned` reads do not require alignment
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Just took `num_own_segment_roots * SegmentRoot::SIZE` bytes,
            // and `[u8; SegmentRoot::SIZE]` has alignment 1, so any pointer is
            // valid for it
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        // Note: the checked parser for the nested structure is used here, unlike in
        // `try_from_bytes_unchecked()`
        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segments,
            leaf_shard_blocks,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// Currently always returns `true`: there is nothing to cross-check here
    /// beyond what parsing already validated (nested structures are checked by
    /// their own parsers in [`Self::try_from_bytes()`]).
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Like [`Self::try_from_bytes()`], but uses the unchecked parser for the
    /// nested leaf shard block infos and skips the internal consistency check.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Just took exactly `LocalSegmentIndex::SIZE` bytes, and
            // `Unaligned` reads do not require alignment
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Just took `num_own_segment_roots * SegmentRoot::SIZE` bytes,
            // and `[u8; SegmentRoot::SIZE]` has alignment 1, so any pointer is
            // valid for it
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        Some((
            Self {
                own_segments,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }

    /// Shard's own segments (if any)
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Information about leaf shard blocks included in this block body
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedIntermediateShardBody::new(
                // Own segment roots are contiguous starting at
                // `first_local_segment_index`, so the local index is recovered from
                // the position
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.leaf_shard_blocks.iter(),
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedIntermediateShardBody::new(iter::empty(), self.leaf_shard_blocks.iter())
                .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Compute block body root.
    ///
    /// The root is a small balanced tree: `((own_segments_root,
    /// leaf_segments_root), headers_root)`, with the default root substituted when
    /// there are no own segments.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            BalancedMerkleTree::compute_root_only(&[
                *self
                    .own_segments
                    .as_ref()
                    .map(OwnSegments::root)
                    .unwrap_or_default(),
                *self.leaf_shard_blocks.segments_root(),
            ]),
            *self.leaf_shard_blocks.headers_root(),
        ]);

        Blake3Hash::new(root)
    }
}
1487
/// Collection of transactions stored in their encoded form
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    /// Number of transactions stored in `bytes`
    num_transactions: usize,
    /// Concatenated transaction encodings, each followed by zero padding to
    /// `u128` alignment, covering exactly `num_transactions` entries
    bytes: &'a [u8],
}
1494
1495impl<'a> Transactions<'a> {
1496 #[inline]
1502 pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1503 let num_transactions = bytes.split_off(..size_of::<u32>())?;
1511 let num_transactions = u32::from_le_bytes([
1512 num_transactions[0],
1513 num_transactions[1],
1514 num_transactions[2],
1515 num_transactions[3],
1516 ]) as usize;
1517
1518 let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
1519 let bytes_start = remainder;
1520
1521 for _ in 0..num_transactions {
1522 (_, remainder) = Transaction::try_from_bytes(bytes)?;
1523 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
1524 }
1525
1526 Some((
1527 Self {
1528 num_transactions,
1529 bytes: &bytes_start[..bytes_start.len() - remainder.len()],
1530 },
1531 remainder,
1532 ))
1533 }
1534
1535 #[inline]
1537 pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + use<'a> {
1538 let mut remainder = self.bytes;
1539
1540 (0..self.num_transactions).map(move |_| {
1541 let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };
1543
1544 remainder = &remainder[transaction.encoded_size()..];
1545 remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
1546 .expect("Already checked in constructor; qed");
1547
1548 transaction
1549 })
1550 }
1551
1552 #[inline(always)]
1554 pub const fn len(&self) -> usize {
1555 self.num_transactions
1556 }
1557
1558 #[inline(always)]
1560 pub const fn is_empty(&self) -> bool {
1561 self.num_transactions == 0
1562 }
1563
1564 #[inline]
1568 pub fn root(&self) -> Blake3Hash {
1569 let root = UnbalancedMerkleTree::compute_root_only::<{ u32::MAX as u64 }, _, _>(
1570 self.iter().map(|transaction| {
1571 single_block_hash(transaction.hash().as_ref())
1575 .expect("Less than a single block worth of bytes; qed")
1576 }),
1577 )
1578 .unwrap_or_default();
1579
1580 Blake3Hash::new(root)
1581 }
1582}
1583
/// Leaf shard block body
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    /// Segment roots produced by this shard itself (if any), together with the
    /// local index of the first of them
    own_segments: Option<OwnSegments<'a>>,
    /// Transactions included in this block body
    transactions: Transactions<'a>,
}
1594
impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::LeafShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Delegates to the inherent `to_owned()`; inherent methods take precedence
        // over trait methods, so this does not recurse
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Delegates to the inherent `root()` (inherent methods take precedence)
        self.root()
    }
}
1612
impl<'a> LeafShardBody<'a> {
    /// Try to create a new instance from provided bytes.
    ///
    /// Layout read here: one `u8` count of own segment roots, then (when the count
    /// is non-zero) a `LocalSegmentIndex` and that many `SegmentRoot`s, followed by
    /// transactions. Internal consistency of the result is checked via
    /// [`Self::is_internally_consistent()`] before returning.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Just took exactly `LocalSegmentIndex::SIZE` bytes, and
            // `Unaligned` reads do not require alignment
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Just took `num_own_segment_roots * SegmentRoot::SIZE` bytes,
            // and `[u8; SegmentRoot::SIZE]` has alignment 1, so any pointer is
            // valid for it
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        let body = Self {
            own_segments,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// Currently always returns `true`: there is nothing to cross-check here
    /// beyond what parsing already validated.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Like [`Self::try_from_bytes()`], but skips the internal consistency check.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Just took exactly `LocalSegmentIndex::SIZE` bytes, and
            // `Unaligned` reads do not require alignment
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Just took `num_own_segment_roots * SegmentRoot::SIZE` bytes,
            // and `[u8; SegmentRoot::SIZE]` has alignment 1, so any pointer is
            // valid for it
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segments,
                transactions,
            },
            remainder,
        ))
    }

    /// Shard's own segments (if any)
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Transactions included in this block body
    #[inline(always)]
    pub fn transactions(&self) -> &Transactions<'a> {
        &self.transactions
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedLeafShardBody {
        let mut builder = if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            // Own segment roots are contiguous starting at
            // `first_local_segment_index`, so the local index is recovered from the
            // position
            OwnedLeafShardBody::init(own_segments.segment_roots.iter().copied().enumerate().map(
                |(index, segment_root)| {
                    (
                        first_local_segment_index + LocalSegmentIndex::from(index as u64),
                        segment_root,
                    )
                },
            ))
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedLeafShardBody::init(iter::empty())
                .expect("`self` is always a valid invariant; qed")
        };
        for transaction in self.transactions.iter() {
            builder
                .add_transaction(transaction)
                .expect("`self` is always a valid invariant; qed");
        }

        builder.finish()
    }

    /// Compute block body root.
    ///
    /// The root is a balanced tree over `(own_segments_root, transactions_root)`,
    /// with the default root substituted when there are no own segments.
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            *self
                .own_segments
                .as_ref()
                .map(OwnSegments::root)
                .unwrap_or_default(),
            *self.transactions.root(),
        ]);

        Blake3Hash::new(root)
    }
}
1786
/// Block body that can correspond to any shard kind
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Block body of the beacon chain
    BeaconChain(BeaconChainBody<'a>),
    /// Block body of an intermediate shard
    IntermediateShard(IntermediateShardBody<'a>),
    /// Block body of a leaf shard
    LeafShard(LeafShardBody<'a>),
}
1800
1801impl<'a> BlockBody<'a> {
1802 #[inline]
1809 pub fn try_from_bytes(bytes: &'a [u8], shard_kind: RealShardKind) -> Option<(Self, &'a [u8])> {
1810 match shard_kind {
1811 RealShardKind::BeaconChain => {
1812 let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1813 Some((Self::BeaconChain(body), remainder))
1814 }
1815 RealShardKind::IntermediateShard => {
1816 let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1817 Some((Self::IntermediateShard(body), remainder))
1818 }
1819 RealShardKind::LeafShard => {
1820 let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1821 Some((Self::LeafShard(body), remainder))
1822 }
1823 }
1824 }
1825
1826 #[inline]
1831 pub fn is_internally_consistent(&self) -> bool {
1832 match self {
1833 Self::BeaconChain(body) => body.is_internally_consistent(),
1834 Self::IntermediateShard(body) => body.is_internally_consistent(),
1835 Self::LeafShard(body) => body.is_internally_consistent(),
1836 }
1837 }
1838
1839 #[inline]
1842 pub fn try_from_bytes_unchecked(
1843 bytes: &'a [u8],
1844 shard_kind: RealShardKind,
1845 ) -> Option<(Self, &'a [u8])> {
1846 match shard_kind {
1847 RealShardKind::BeaconChain => {
1848 let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1849 Some((Self::BeaconChain(body), remainder))
1850 }
1851 RealShardKind::IntermediateShard => {
1852 let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1853 Some((Self::IntermediateShard(body), remainder))
1854 }
1855 RealShardKind::LeafShard => {
1856 let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1857 Some((Self::LeafShard(body), remainder))
1858 }
1859 }
1860 }
1861
1862 #[cfg(feature = "alloc")]
1864 #[inline(always)]
1865 pub fn to_owned(self) -> OwnedBlockBody {
1866 match self {
1867 Self::BeaconChain(body) => body.to_owned().into(),
1868 Self::IntermediateShard(body) => body.to_owned().into(),
1869 Self::LeafShard(body) => body.to_owned().into(),
1870 }
1871 }
1872
1873 #[inline]
1878 pub fn root(&self) -> Blake3Hash {
1879 match self {
1880 Self::BeaconChain(body) => body.root(),
1881 Self::IntermediateShard(body) => body.root(),
1882 Self::LeafShard(body) => body.root(),
1883 }
1884 }
1885}