Broadcast final shred for slots that are interrupted (#6269)

* Broadcast final shred for slots that are interrupted
Authored by carllin on 2019-10-09 16:07:18 -07:00; committed by GitHub
parent de82e60c64
commit dd66d16fdb
9 changed files with 388 additions and 183 deletions
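
For orientation before the diffs: a minimal sketch of the behavior in the title, using a hypothetical helper name rather than the broadcast-stage code from this commit. When a leader abandons a slot before finishing it, one final data shred is still produced for that slot with the last-in-slot flag set, so downstream validators can mark the slot complete instead of waiting for shreds that will never arrive.

    use solana_core::entry::Entry;
    use solana_core::shred::{Shred, Shredder};

    // Hypothetical sketch, not the actual implementation. Assumes that shredding an
    // empty entry batch with the last-in-slot flag (the `true` argument, as in the
    // bench call `entries_to_shreds(&entries, true, 0)`) still yields a final data
    // shred for the interrupted slot.
    fn finish_interrupted_slot(shredder: &Shredder, next_shred_index: u32) -> Vec<Shred> {
        let no_entries: Vec<Entry> = vec![];
        shredder
            .entries_to_shreds(&no_entries, true, next_shred_index)
            .0
    }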


@@ -4,7 +4,7 @@ extern crate test;
 use solana_core::entry::create_ticks;
 use solana_core::shred::{
-    max_ticks_per_shred, Shredder, RECOMMENDED_FEC_RATE, SIZE_OF_DATA_SHRED_HEADER,
+    max_ticks_per_n_shreds, Shredder, RECOMMENDED_FEC_RATE, SIZE_OF_DATA_SHRED_HEADER,
 };
 use solana_sdk::hash::Hash;
 use solana_sdk::packet::PACKET_DATA_SIZE;
@@ -18,7 +18,7 @@ fn bench_shredder(bencher: &mut Bencher) {
     let shred_size = PACKET_DATA_SIZE - *SIZE_OF_DATA_SHRED_HEADER;
     let num_shreds = ((1000 * 1000) + (shred_size - 1)) / shred_size;
     // ~1Mb
-    let num_ticks = max_ticks_per_shred() * num_shreds as u64;
+    let num_ticks = max_ticks_per_n_shreds(1) * num_shreds as u64;
     let entries = create_ticks(num_ticks, Hash::default());
     bencher.iter(|| {
         let shredder = Shredder::new(1, 0, RECOMMENDED_FEC_RATE, kp.clone()).unwrap();
@@ -32,7 +32,7 @@ fn bench_deshredder(bencher: &mut Bencher) {
     let shred_size = PACKET_DATA_SIZE - *SIZE_OF_DATA_SHRED_HEADER;
     // ~10Mb
     let num_shreds = ((10000 * 1000) + (shred_size - 1)) / shred_size;
-    let num_ticks = max_ticks_per_shred() * num_shreds as u64;
+    let num_ticks = max_ticks_per_n_shreds(1) * num_shreds as u64;
     let entries = create_ticks(num_ticks, Hash::default());
     let shredder = Shredder::new(1, 0, RECOMMENDED_FEC_RATE, kp).unwrap();
     let data_shreds = shredder.entries_to_shreds(&entries, true, 0).0;
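
The only change in this hunk is the helper rename: `max_ticks_per_shred()` becomes `max_ticks_per_n_shreds(n)`, and passing `1` keeps the benches' old behavior of sizing ticks per single shred. The surrounding sizing arithmetic is a plain ceiling division; a small self-contained illustration of that rounding (the 1000-byte payload is a stand-in, not the real `PACKET_DATA_SIZE - *SIZE_OF_DATA_SHRED_HEADER` value):

    // Ceiling division: smallest shred count whose combined payload covers `target_bytes`.
    fn shreds_needed(target_bytes: usize, shred_payload_size: usize) -> usize {
        (target_bytes + shred_payload_size - 1) / shred_payload_size
    }

    fn main() {
        let shred_payload_size = 1_000; // stand-in for PACKET_DATA_SIZE - *SIZE_OF_DATA_SHRED_HEADER
        assert_eq!(shreds_needed(1_000_000, shred_payload_size), 1_000);
        assert_eq!(shreds_needed(1_000_001, shred_payload_size), 1_001); // one extra byte adds a shred
    }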