1use crate::impls::{fill_via_u32_chunks, fill_via_u64_chunks};
57use crate::{CryptoRng, RngCore, SeedableRng, TryRngCore};
58use core::fmt;
59#[cfg(feature = "serde")]
60use serde::{Deserialize, Serialize};
61
/// A trait for RNGs which do not generate random numbers individually, but in
/// blocks (typically an array of `u32` or `u64` words). This technique is
/// commonly used by cryptographic RNGs to improve performance.
///
/// Implementors only provide `generate`; the [`BlockRng`] / [`BlockRng64`]
/// wrappers in this module turn such a core into a full `RngCore`.
pub trait BlockRngCore {
    /// Results element type, e.g. `u32`.
    type Item;

    /// Results type. This is the 'block' an RNG implementing `BlockRngCore`
    /// generates, which will usually be an array like `[u32; 16]`.
    type Results: AsRef<[Self::Item]> + AsMut<[Self::Item]> + Default;

    /// Generate a new block of results, overwriting `results`.
    fn generate(&mut self, results: &mut Self::Results);
}
78
79pub trait CryptoBlockRng: BlockRngCore {}
84
/// A wrapper type implementing [`RngCore`] for some type implementing
/// [`BlockRngCore`] with `u32` array buffer; i.e. this can be used to
/// implement a full RNG from just a `generate` function.
///
/// The buffer is lazily filled: `new` marks it as fully consumed, so the
/// first use triggers `generate`.
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(
    feature = "serde",
    serde(
        bound = "for<'x> R: Serialize + Deserialize<'x>, for<'x> R::Results: Serialize + Deserialize<'x>"
    )
)]
pub struct BlockRng<R: BlockRngCore> {
    // Buffered results from the last `generate` call.
    results: R::Results,
    // Index of the next unconsumed word; `== results.len()` means "empty".
    index: usize,
    /// The *core* part of the RNG, implementing the `generate` function.
    pub core: R,
}
132
133impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng<R> {
135 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
136 fmt.debug_struct("BlockRng")
137 .field("core", &self.core)
138 .field("result_len", &self.results.as_ref().len())
139 .field("index", &self.index)
140 .finish()
141 }
142}
143
144impl<R: BlockRngCore> BlockRng<R> {
145 #[inline]
148 pub fn new(core: R) -> BlockRng<R> {
149 let results_empty = R::Results::default();
150 BlockRng {
151 core,
152 index: results_empty.as_ref().len(),
153 results: results_empty,
154 }
155 }
156
157 #[inline(always)]
163 pub fn index(&self) -> usize {
164 self.index
165 }
166
167 #[inline]
170 pub fn reset(&mut self) {
171 self.index = self.results.as_ref().len();
172 }
173
174 #[inline]
177 pub fn generate_and_set(&mut self, index: usize) {
178 assert!(index < self.results.as_ref().len());
179 self.core.generate(&mut self.results);
180 self.index = index;
181 }
182}
183
impl<R: BlockRngCore<Item = u32>> RngCore for BlockRng<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        // Refill the buffer once every buffered word has been consumed.
        if self.index >= self.results.as_ref().len() {
            self.generate_and_set(0);
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        value
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        // Combine two consecutive buffered u32s into a u64, low word first.
        let read_u64 = |results: &[u32], index| {
            let data = &results[index..=index + 1];
            (u64::from(data[1]) << 32) | u64::from(data[0])
        };

        let len = self.results.as_ref().len();

        let index = self.index;
        if index < len - 1 {
            // Common case: at least two words are still buffered.
            self.index += 2;
            read_u64(self.results.as_ref(), index)
        } else if index >= len {
            // Buffer exhausted: refill, consume words 0 and 1, leave index at 2.
            self.generate_and_set(2);
            read_u64(self.results.as_ref(), 0)
        } else {
            // Exactly one word left (index == len - 1): it becomes the low
            // half; refill and take the first fresh word as the high half.
            let x = u64::from(self.results.as_ref()[len - 1]);
            self.generate_and_set(1);
            let y = u64::from(self.results.as_ref()[0]);
            (y << 32) | x
        }
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut read_len = 0;
        while read_len < dest.len() {
            // Refill whenever the buffer runs dry mid-fill.
            if self.index >= self.results.as_ref().len() {
                self.generate_and_set(0);
            }
            // Copy as many buffered words as fit into the remaining dest;
            // returns (words consumed, bytes written).
            let (consumed_u32, filled_u8) = fill_via_u32_chunks(
                &mut self.results.as_mut()[self.index..],
                &mut dest[read_len..],
            );

            self.index += consumed_u32;
            read_len += filled_u8;
        }
    }
}
238
239impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng<R> {
240 type Seed = R::Seed;
241
242 #[inline(always)]
243 fn from_seed(seed: Self::Seed) -> Self {
244 Self::new(R::from_seed(seed))
245 }
246
247 #[inline(always)]
248 fn seed_from_u64(seed: u64) -> Self {
249 Self::new(R::seed_from_u64(seed))
250 }
251
252 #[inline(always)]
253 fn from_rng(rng: &mut impl RngCore) -> Self {
254 Self::new(R::from_rng(rng))
255 }
256
257 #[inline(always)]
258 fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
259 R::try_from_rng(rng).map(Self::new)
260 }
261}
262
263impl<R: CryptoBlockRng + BlockRngCore<Item = u32>> CryptoRng for BlockRng<R> {}
264
/// A wrapper type implementing [`RngCore`] for some type implementing
/// [`BlockRngCore`] with `u64` array buffer; i.e. this can be used to
/// implement a full RNG from just a `generate` function.
///
/// Like [`BlockRng`], but for 64-bit cores: `next_u32` consumes one half of a
/// buffered `u64` at a time, tracked by `half_used`.
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct BlockRng64<R: BlockRngCore + ?Sized> {
    // Buffered results from the last `generate` call.
    results: R::Results,
    // Index of the next unconsumed u64; `== results.len()` means "empty".
    index: usize,
    // True when the low half of the previous word was returned by `next_u32`
    // but its high half has not yet been consumed.
    half_used: bool,
    /// The *core* part of the RNG, implementing the `generate` function.
    pub core: R,
}
294
295impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng64<R> {
297 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
298 fmt.debug_struct("BlockRng64")
299 .field("core", &self.core)
300 .field("result_len", &self.results.as_ref().len())
301 .field("index", &self.index)
302 .field("half_used", &self.half_used)
303 .finish()
304 }
305}
306
307impl<R: BlockRngCore> BlockRng64<R> {
308 #[inline]
311 pub fn new(core: R) -> BlockRng64<R> {
312 let results_empty = R::Results::default();
313 BlockRng64 {
314 core,
315 index: results_empty.as_ref().len(),
316 half_used: false,
317 results: results_empty,
318 }
319 }
320
321 #[inline(always)]
327 pub fn index(&self) -> usize {
328 self.index
329 }
330
331 #[inline]
334 pub fn reset(&mut self) {
335 self.index = self.results.as_ref().len();
336 self.half_used = false;
337 }
338
339 #[inline]
342 pub fn generate_and_set(&mut self, index: usize) {
343 assert!(index < self.results.as_ref().len());
344 self.core.generate(&mut self.results);
345 self.index = index;
346 self.half_used = false;
347 }
348}
349
impl<R: BlockRngCore<Item = u64>> RngCore for BlockRng64<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        // If the previous word is half-consumed, step back to it; otherwise
        // use the next fresh word.
        let mut index = self.index - self.half_used as usize;
        if index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
            index = 0;
            self.half_used = false;
        }

        // First call on a word returns the low 32 bits (shift 0); the second
        // returns the high 32 bits (shift 32).
        let shift = 32 * (self.half_used as usize);

        // Toggle the flag, and advance `index` only when starting a new word,
        // so the next call can come back for the remaining half.
        self.half_used = !self.half_used;
        self.index += self.half_used as usize;

        (self.results.as_ref()[index] >> shift) as u32
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        // Refill the buffer once every buffered word has been consumed.
        if self.index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        // Any half-consumed word is abandoned: a full u64 always starts fresh.
        self.half_used = false;
        value
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut read_len = 0;
        // Byte fills ignore (and discard) any half-consumed word.
        self.half_used = false;
        while read_len < dest.len() {
            // Refill whenever the buffer runs dry mid-fill.
            if self.index >= self.results.as_ref().len() {
                self.core.generate(&mut self.results);
                self.index = 0;
            }

            // Copy as many buffered words as fit into the remaining dest;
            // returns (words consumed, bytes written).
            let (consumed_u64, filled_u8) = fill_via_u64_chunks(
                &mut self.results.as_mut()[self.index..],
                &mut dest[read_len..],
            );

            self.index += consumed_u64;
            read_len += filled_u8;
        }
    }
}
403
404impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng64<R> {
405 type Seed = R::Seed;
406
407 #[inline(always)]
408 fn from_seed(seed: Self::Seed) -> Self {
409 Self::new(R::from_seed(seed))
410 }
411
412 #[inline(always)]
413 fn seed_from_u64(seed: u64) -> Self {
414 Self::new(R::seed_from_u64(seed))
415 }
416
417 #[inline(always)]
418 fn from_rng(rng: &mut impl RngCore) -> Self {
419 Self::new(R::from_rng(rng))
420 }
421
422 #[inline(always)]
423 fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
424 R::try_from_rng(rng).map(Self::new)
425 }
426}
427
428impl<R: CryptoBlockRng + BlockRngCore<Item = u64>> CryptoRng for BlockRng64<R> {}
429
#[cfg(test)]
mod test {
    use crate::block::{BlockRng, BlockRng64, BlockRngCore};
    use crate::{RngCore, SeedableRng};

    /// Trivial 32-bit core: fills its block with a counter advancing by a
    /// fixed odd constant per word. Not random — just deterministic data for
    /// checking the wrapper's buffering logic.
    #[derive(Debug, Clone)]
    struct DummyRng {
        counter: u32,
    }

    impl BlockRngCore for DummyRng {
        type Item = u32;
        type Results = [u32; 16];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(3511615421);
            }
        }
    }

    impl SeedableRng for DummyRng {
        type Seed = [u8; 4];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng {
                counter: u32::from_le_bytes(seed),
            }
        }
    }

    // For a u32-buffered BlockRng, mixing next_u32 and next_u64 calls must
    // consume the same underlying byte stream regardless of call order.
    #[test]
    fn blockrng_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng::<DummyRng>::from_seed([1, 2, 3, 4]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        // u32, u64, u32
        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        // u32, u32, u64 — same bytes expected
        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_eq!(a, b);

        // u64, u32, u32 — same bytes expected
        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(a, c);
    }

    /// Trivial 64-bit core, analogous to `DummyRng`.
    #[derive(Debug, Clone)]
    struct DummyRng64 {
        counter: u64,
    }

    impl BlockRngCore for DummyRng64 {
        type Item = u64;
        type Results = [u64; 8];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(2781463553396133981);
            }
        }
    }

    impl SeedableRng for DummyRng64 {
        type Seed = [u8; 8];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng64 {
                counter: u64::from_le_bytes(seed),
            }
        }
    }

    // For a u64-buffered BlockRng64, a next_u64 after an odd number of
    // next_u32 calls discards the pending half-word, so call order is NOT
    // byte-stream-transparent — this pins that documented behavior.
    #[test]
    fn blockrng64_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng64::<DummyRng64>::from_seed([1, 2, 3, 4, 5, 6, 7, 8]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        // u32, u64, u32
        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        // u32, u32, u64: the u64 starts a fresh word, skipping half a word
        // relative to `a` — streams agree only on the aligned sections.
        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_ne!(a, b);
        assert_eq!(&a[..4], &b[..4]);
        assert_eq!(&a[4..12], &b[8..]);

        // u64, u32, u32 — matches `b` (both consume whole words in order).
        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(b, c);
    }
}