// Copyright 2018 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The `BlockRngCore` trait and implementation helpers
//!
//! The [`BlockRngCore`] trait exists to assist in the implementation of RNGs
//! which generate a block of data in a cache instead of returning generated
//! values directly.
//!
//! Usage of this trait is optional, but provides two advantages:
//! implementations only need to concern themselves with generation of the
//! block, not the various [`RngCore`] methods (especially [`fill_bytes`], where
//! the optimal implementations are not trivial), and this allows
//! `ReseedingRng` (see the [`rand`](https://docs.rs/rand) crate) to perform
//! periodic reseeding with very low overhead.
//!
//! # Example
//!
//! ```no_run
//! use rand_core::{RngCore, SeedableRng};
//! use rand_core::block::{BlockRngCore, BlockRng};
//!
//! struct MyRngCore;
//!
//! impl BlockRngCore for MyRngCore {
//!     type Item = u32;
//!     type Results = [u32; 16];
//!
//!     fn generate(&mut self, results: &mut Self::Results) {
//!         unimplemented!()
//!     }
//! }
//!
//! impl SeedableRng for MyRngCore {
//!     type Seed = [u8; 32];
//!     fn from_seed(seed: Self::Seed) -> Self {
//!         unimplemented!()
//!     }
//! }
//!
//! // optionally, also implement CryptoBlockRng for MyRngCore
//!
//! // Final RNG.
//! let mut rng = BlockRng::<MyRngCore>::seed_from_u64(0);
//! println!("First value: {}", rng.next_u32());
//! ```
//!
//! [`BlockRngCore`]: crate::block::BlockRngCore
//! [`fill_bytes`]: RngCore::fill_bytes

use crate::impls::{fill_via_u32_chunks, fill_via_u64_chunks};
use crate::{CryptoRng, RngCore, SeedableRng, TryRngCore};
use core::fmt;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

/// A trait for RNGs which do not generate random numbers individually, but in
/// blocks (typically `[u32; N]`). This technique is commonly used by
/// cryptographic RNGs to improve performance.
///
/// See the [module][crate::block] documentation for details.
pub trait BlockRngCore {
    /// Results element type, e.g. `u32`.
    type Item;

    /// Results type. This is the 'block' an RNG implementing `BlockRngCore`
    /// generates, which will usually be an array like `[u32; 16]`.
    type Results: AsRef<[Self::Item]> + AsMut<[Self::Item]> + Default;

    /// Generate a new block of results.
    fn generate(&mut self, results: &mut Self::Results);
}

/// A marker trait used to indicate that an [`RngCore`] implementation is
/// supposed to be cryptographically secure.
///
/// See [`CryptoRng`] docs for more information.
pub trait CryptoBlockRng: BlockRngCore {}

/// A wrapper type implementing [`RngCore`] for some type implementing
/// [`BlockRngCore`] with `u32` array buffer; i.e. this can be used to implement
/// a full RNG from just a `generate` function.
///
/// The `core` field may be accessed directly but the results buffer may not.
/// PRNG implementations can simply use a type alias
/// (`pub type MyRng = BlockRng<MyRngCore>;`) but might prefer to use a
/// wrapper type (`pub struct MyRng(BlockRng<MyRngCore>);`); the latter must
/// re-implement `RngCore` but hides the implementation details and allows
/// extra functionality to be defined on the RNG
/// (e.g. `impl MyRng { fn set_stream(...){...} }`).
///
/// `BlockRng` has heavily optimized implementations of the [`RngCore`] methods
/// reading values from the results buffer. These methods also handle the
/// bookkeeping of when to generate a new batch of values.
///
/// No whole generated `u32` values are thrown away and all values are consumed
/// in-order. [`next_u32`] simply takes the next available `u32` value.
/// [`next_u64`] is implemented by combining two `u32` values, least
/// significant first. [`fill_bytes`] consumes a whole number of `u32` values,
/// converting each `u32` to a byte slice in little-endian order. If the
/// requested byte length is not a multiple of 4, some bytes will be discarded.
///
/// See also [`BlockRng64`] which uses `u64` array buffers. Currently there is
/// no direct support for other buffer types.
///
/// For easy initialization `BlockRng` also implements [`SeedableRng`].
///
/// [`next_u32`]: RngCore::next_u32
/// [`next_u64`]: RngCore::next_u64
/// [`fill_bytes`]: RngCore::fill_bytes
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(
    feature = "serde",
    serde(
        bound = "for<'x> R: Serialize + Deserialize<'x>, for<'x> R::Results: Serialize + Deserialize<'x>"
    )
)]
pub struct BlockRng<R: BlockRngCore> {
    results: R::Results,
    index: usize,
    /// The *core* part of the RNG, implementing the `generate` function.
    pub core: R,
}

// Custom Debug implementation that does not expose the contents of `results`.
impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng<R> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("BlockRng")
            .field("core", &self.core)
            .field("result_len", &self.results.as_ref().len())
            .field("index", &self.index)
            .finish()
    }
}

impl<R: BlockRngCore> BlockRng<R> {
    /// Create a new `BlockRng` from an existing RNG implementing
    /// `BlockRngCore`. Results will be generated on first use.
    #[inline]
    pub fn new(core: R) -> BlockRng<R> {
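        // `index` starts at the buffer length, so the buffer counts as empty
        // and the first use of the RNG triggers a call to `generate`.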
        let results_empty = R::Results::default();
        BlockRng {
            core,
            index: results_empty.as_ref().len(),
            results: results_empty,
        }
    }

    /// Get the index into the result buffer.
    ///
    /// If this is equal to or larger than the size of the result buffer then
    /// the buffer is "empty" and `generate()` must be called to produce new
    /// results.
    #[inline(always)]
    pub fn index(&self) -> usize {
        self.index
    }

    /// Reset the number of available results.
    /// This will force a new set of results to be generated on next use.
    #[inline]
    pub fn reset(&mut self) {
        self.index = self.results.as_ref().len();
    }

    /// Generate a new set of results immediately, setting the index to the
    /// given value.
    #[inline]
    pub fn generate_and_set(&mut self, index: usize) {
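        // `index` must lie inside the buffer; otherwise the freshly generated
        // results would immediately count as exhausted.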
        assert!(index < self.results.as_ref().len());
        self.core.generate(&mut self.results);
        self.index = index;
    }
}

impl<R: BlockRngCore<Item = u32>> RngCore for BlockRng<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        if self.index >= self.results.as_ref().len() {
            self.generate_and_set(0);
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        value
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
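        // Combine two adjacent `u32` values into a `u64`, least significant
        // word first, consistent with the little-endian order used by
        // `fill_bytes`.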
        let read_u64 = |results: &[u32], index| {
            let data = &results[index..=index + 1];
            u64::from(data[1]) << 32 | u64::from(data[0])
        };

        let len = self.results.as_ref().len();

        let index = self.index;
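        // Three cases, depending on how many `u32` values remain unread:
        // (1) at least two remain: read both from the current position;
        // (2) the buffer is exhausted: regenerate, then read the first two
        //     fresh values;
        // (3) exactly one remains: use it as the low half and take the high
        //     half from the start of a freshly generated buffer.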
        if index < len - 1 {
            self.index += 2;
            // Read a u64 from the current index
            read_u64(self.results.as_ref(), index)
        } else if index >= len {
            self.generate_and_set(2);
            read_u64(self.results.as_ref(), 0)
        } else {
            let x = u64::from(self.results.as_ref()[len - 1]);
            self.generate_and_set(1);
            let y = u64::from(self.results.as_ref()[0]);
            (y << 32) | x
        }
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
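        // Copy little-endian bytes of whole `u32` values from the results
        // buffer into `dest`, regenerating the buffer whenever it runs out,
        // until `dest` is full.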
        let mut read_len = 0;
        while read_len < dest.len() {
            if self.index >= self.results.as_ref().len() {
                self.generate_and_set(0);
            }
            let (consumed_u32, filled_u8) = fill_via_u32_chunks(
                &mut self.results.as_mut()[self.index..],
                &mut dest[read_len..],
            );

            self.index += consumed_u32;
            read_len += filled_u8;
        }
    }
}

impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng<R> {
    type Seed = R::Seed;

    #[inline(always)]
    fn from_seed(seed: Self::Seed) -> Self {
        Self::new(R::from_seed(seed))
    }

    #[inline(always)]
    fn seed_from_u64(seed: u64) -> Self {
        Self::new(R::seed_from_u64(seed))
    }

    #[inline(always)]
    fn from_rng(rng: &mut impl RngCore) -> Self {
        Self::new(R::from_rng(rng))
    }

    #[inline(always)]
    fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
        R::try_from_rng(rng).map(Self::new)
    }
}

impl<R: CryptoBlockRng + BlockRngCore<Item = u32>> CryptoRng for BlockRng<R> {}

/// A wrapper type implementing [`RngCore`] for some type implementing
/// [`BlockRngCore`] with `u64` array buffer; i.e. this can be used to implement
/// a full RNG from just a `generate` function.
///
/// This is similar to [`BlockRng`], but specialized for algorithms that operate
/// on `u64` values.
///
/// No whole generated `u64` values are thrown away and all values are consumed
/// in-order. [`next_u64`] simply takes the next available `u64` value.
/// [`next_u32`] is however a bit special: half of a `u64` is consumed, leaving
/// the other half in the buffer. If the next call is to [`next_u32`], the
/// remaining half is consumed; however, both [`next_u64`] and [`fill_bytes`]
/// discard the remaining half of any half-consumed `u64` when called.
///
/// [`fill_bytes`] consumes a whole number of `u64` values. If the requested length
/// is not a multiple of 8, some bytes will be discarded.
///
/// [`next_u32`]: RngCore::next_u32
/// [`next_u64`]: RngCore::next_u64
/// [`fill_bytes`]: RngCore::fill_bytes
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct BlockRng64<R: BlockRngCore + ?Sized> {
    results: R::Results,
    index: usize,
    half_used: bool, // true if only half of the previous result is used
    /// The *core* part of the RNG, implementing the `generate` function.
    pub core: R,
}

// Custom Debug implementation that does not expose the contents of `results`.
impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng64<R> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("BlockRng64")
            .field("core", &self.core)
            .field("result_len", &self.results.as_ref().len())
            .field("index", &self.index)
            .field("half_used", &self.half_used)
            .finish()
    }
}

impl<R: BlockRngCore> BlockRng64<R> {
    /// Create a new `BlockRng64` from an existing RNG implementing
    /// `BlockRngCore`. Results will be generated on first use.
    #[inline]
    pub fn new(core: R) -> BlockRng64<R> {
        let results_empty = R::Results::default();
        BlockRng64 {
            core,
            index: results_empty.as_ref().len(),
            half_used: false,
            results: results_empty,
        }
    }

    /// Get the index into the result buffer.
    ///
    /// If this is equal to or larger than the size of the result buffer then
    /// the buffer is "empty" and `generate()` must be called to produce new
    /// results.
    #[inline(always)]
    pub fn index(&self) -> usize {
        self.index
    }

    /// Reset the number of available results.
    /// This will force a new set of results to be generated on next use.
    #[inline]
    pub fn reset(&mut self) {
        self.index = self.results.as_ref().len();
        self.half_used = false;
    }

    /// Generate a new set of results immediately, setting the index to the
    /// given value.
    #[inline]
    pub fn generate_and_set(&mut self, index: usize) {
        assert!(index < self.results.as_ref().len());
        self.core.generate(&mut self.results);
        self.index = index;
        self.half_used = false;
    }
}

impl<R: BlockRngCore<Item = u64>> RngCore for BlockRng64<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
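        // `self.index` points at the next unused `u64`. When `half_used` is
        // set, the word just before it still has its high 32 bits available,
        // so step back one slot and read that half instead of a new value.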
        let mut index = self.index - self.half_used as usize;
        if index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
            index = 0;
            // `self.half_used` is by definition `false`
            self.half_used = false;
        }

        let shift = 32 * (self.half_used as usize);

        self.half_used = !self.half_used;
        self.index += self.half_used as usize;

        (self.results.as_ref()[index] >> shift) as u32
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
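        // A whole `u64` is always read from `self.index`; any pending high
        // half tracked by `half_used` is discarded.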
        if self.index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        self.half_used = false;
        value
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
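        // Any half-consumed `u64` is discarded, then whole `u64` values are
        // copied into `dest` as little-endian bytes, regenerating the buffer
        // whenever it runs out.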
        let mut read_len = 0;
        self.half_used = false;
        while read_len < dest.len() {
            if self.index >= self.results.as_ref().len() {
                self.core.generate(&mut self.results);
                self.index = 0;
            }

            let (consumed_u64, filled_u8) = fill_via_u64_chunks(
                &mut self.results.as_mut()[self.index..],
                &mut dest[read_len..],
            );

            self.index += consumed_u64;
            read_len += filled_u8;
        }
    }
}

impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng64<R> {
    type Seed = R::Seed;

    #[inline(always)]
    fn from_seed(seed: Self::Seed) -> Self {
        Self::new(R::from_seed(seed))
    }

    #[inline(always)]
    fn seed_from_u64(seed: u64) -> Self {
        Self::new(R::seed_from_u64(seed))
    }

    #[inline(always)]
    fn from_rng(rng: &mut impl RngCore) -> Self {
        Self::new(R::from_rng(rng))
    }

    #[inline(always)]
    fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
        R::try_from_rng(rng).map(Self::new)
    }
}

impl<R: CryptoBlockRng + BlockRngCore<Item = u64>> CryptoRng for BlockRng64<R> {}

#[cfg(test)]
mod test {
    use crate::block::{BlockRng, BlockRng64, BlockRngCore};
    use crate::{RngCore, SeedableRng};

    #[derive(Debug, Clone)]
    struct DummyRng {
        counter: u32,
    }

    impl BlockRngCore for DummyRng {
        type Item = u32;
        type Results = [u32; 16];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(3511615421);
            }
        }
    }

    impl SeedableRng for DummyRng {
        type Seed = [u8; 4];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng {
                counter: u32::from_le_bytes(seed),
            }
        }
    }

    #[test]
    fn blockrng_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng::<DummyRng>::from_seed([1, 2, 3, 4]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_eq!(a, b);

        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(a, c);
    }

    #[derive(Debug, Clone)]
    struct DummyRng64 {
        counter: u64,
    }

    impl BlockRngCore for DummyRng64 {
        type Item = u64;
        type Results = [u64; 8];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(2781463553396133981);
            }
        }
    }

    impl SeedableRng for DummyRng64 {
        type Seed = [u8; 8];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng64 {
                counter: u64::from_le_bytes(seed),
            }
        }
    }

    #[test]
    fn blockrng64_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng64::<DummyRng64>::from_seed([1, 2, 3, 4, 5, 6, 7, 8]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_ne!(a, b);
        assert_eq!(&a[..4], &b[..4]);
        assert_eq!(&a[4..12], &b[8..]);

        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(b, c);
    }
}