// ab_core_primitives/pieces/flat_pieces.rs

use crate::pieces::cow_bytes::CowBytes;
use crate::pieces::{Piece, PieceArray};
use crate::segments::RecordedHistorySegment;
use alloc::boxed::Box;
use bytes::{Bytes, BytesMut};
use core::ops::{Deref, DerefMut};
use core::{fmt, slice};
#[cfg(feature = "parallel")]
use rayon::prelude::*;
10
/// A flat, contiguous buffer holding a whole number of pieces.
///
/// Backed by [`CowBytes`], so the underlying storage is either shared
/// (`Bytes`) or owned (`BytesMut`). Dereferences to `[PieceArray]`, giving
/// slice-style access to the individual pieces without copying.
#[derive(Clone, PartialEq, Eq)]
pub struct FlatPieces(CowBytes);
14
15impl fmt::Debug for FlatPieces {
16 #[inline]
17 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
18 f.debug_struct("FlatPieces").finish_non_exhaustive()
19 }
20}
21
impl Deref for FlatPieces {
    type Target = [PieceArray];

    #[inline]
    fn deref(&self) -> &Self::Target {
        let bytes = self.0.as_ref();
        // SAFETY: `[u8; Piece::SIZE]` has alignment 1 (same as `u8`), the
        // pointer comes from a live `&[u8]` borrowed for the same lifetime,
        // and the element count `len / Piece::SIZE` never reads past the end
        // of the buffer because integer division truncates any partial tail.
        // Assumes the backing buffer length is an exact multiple of
        // `Piece::SIZE`, as `FlatPieces::new` guarantees — TODO confirm the
        // invariant holds for every other construction path.
        let pieces = unsafe {
            slice::from_raw_parts(
                bytes.as_ptr().cast::<[u8; Piece::SIZE]>(),
                bytes.len() / Piece::SIZE,
            )
        };
        // Reinterpret the raw `[u8; Piece::SIZE]` chunks as `PieceArray`s.
        PieceArray::slice_from_repr(pieces)
    }
}
39
impl DerefMut for FlatPieces {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // NOTE(review): `CowBytes::as_mut` presumably yields a unique mutable
        // view (copy-on-write for the shared variant) — verify in cow_bytes.rs.
        let bytes = self.0.as_mut();
        // SAFETY: `[u8; Piece::SIZE]` has alignment 1 (same as `u8`), the
        // pointer comes from a live `&mut [u8]` borrowed for the same
        // lifetime (so the aliasing rules are upheld), and the element count
        // `len / Piece::SIZE` never reaches past the end of the buffer
        // because integer division truncates any partial tail. Assumes the
        // buffer length is an exact multiple of `Piece::SIZE`, as
        // `FlatPieces::new` guarantees — TODO confirm for other paths.
        let pieces = unsafe {
            slice::from_raw_parts_mut(
                bytes.as_mut_ptr().cast::<[u8; Piece::SIZE]>(),
                bytes.len() / Piece::SIZE,
            )
        };
        // Reinterpret the raw `[u8; Piece::SIZE]` chunks as `PieceArray`s.
        PieceArray::slice_mut_from_repr(pieces)
    }
}
55
56impl FlatPieces {
57 #[inline]
59 pub fn new(piece_count: usize) -> Self {
60 Self(CowBytes::Owned(BytesMut::zeroed(piece_count * Piece::SIZE)))
61 }
62
63 #[inline]
71 pub fn pieces(&self) -> Box<dyn ExactSizeIterator<Item = Piece> + '_> {
72 match &self.0 {
73 CowBytes::Shared(bytes) => Box::new(
74 bytes
75 .chunks_exact(Piece::SIZE)
76 .map(|slice| Piece(CowBytes::Shared(bytes.slice_ref(slice)))),
77 ),
78 CowBytes::Owned(bytes) => Box::new(
79 bytes
80 .chunks_exact(Piece::SIZE)
81 .map(|slice| Piece(CowBytes::Shared(Bytes::copy_from_slice(slice)))),
82 ),
83 }
84 }
85
86 #[inline]
88 pub fn source_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
89 self.pieces().take(RecordedHistorySegment::NUM_RAW_RECORDS)
90 }
91
92 #[inline]
94 pub fn source(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
95 self.iter().take(RecordedHistorySegment::NUM_RAW_RECORDS)
96 }
97
98 #[inline]
100 pub fn source_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
101 self.iter_mut()
102 .take(RecordedHistorySegment::NUM_RAW_RECORDS)
103 }
104
105 #[inline]
107 pub fn parity_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
108 self.pieces().skip(RecordedHistorySegment::NUM_RAW_RECORDS)
109 }
110
111 #[inline]
113 pub fn parity(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
114 self.iter().skip(RecordedHistorySegment::NUM_RAW_RECORDS)
115 }
116
117 #[inline]
119 pub fn parity_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
120 self.iter_mut()
121 .skip(RecordedHistorySegment::NUM_RAW_RECORDS)
122 }
123
124 pub fn to_shared(self) -> Self {
130 Self(match self.0 {
131 CowBytes::Shared(bytes) => CowBytes::Shared(bytes),
132 CowBytes::Owned(bytes) => CowBytes::Shared(bytes.freeze()),
133 })
134 }
135}
136
#[cfg(feature = "parallel")]
impl FlatPieces {
    /// Parallel borrowing iterator over the first `NUM_RAW_RECORDS` (source)
    /// pieces.
    #[inline]
    pub fn par_source(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        let count = RecordedHistorySegment::NUM_RAW_RECORDS;
        self.par_iter().take(count)
    }

    /// Parallel mutably-borrowing iterator over the source pieces.
    #[inline]
    pub fn par_source_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        let count = RecordedHistorySegment::NUM_RAW_RECORDS;
        self.par_iter_mut().take(count)
    }

    /// Parallel borrowing iterator over the pieces after the source (parity)
    /// pieces.
    #[inline]
    pub fn par_parity(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        let count = RecordedHistorySegment::NUM_RAW_RECORDS;
        self.par_iter().skip(count)
    }

    /// Parallel mutably-borrowing iterator over the parity pieces.
    #[inline]
    pub fn par_parity_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        let count = RecordedHistorySegment::NUM_RAW_RECORDS;
        self.par_iter_mut().skip(count)
    }
}