Add random distribution for avalanche peers (#4493)

* Add random distribution for avalanche peers

* fix clippy warnings

* bug fixes

* nits
Author: Pankaj Garg
Date: 2019-06-01 07:55:43 -07:00 (committed by GitHub)
Parent: e15246746d
Commit: 3574469052
9 changed files with 188 additions and 49 deletions
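In short: retransmit ("avalanche") peers are no longer ordered purely by descending stake; they go through a stake-weighted random shuffle driven by a caller-supplied `ChaChaRng`, so any node that seeds the RNG identically derives the identical peer order. How the seed is derived is not part of this excerpt; a hypothetical sketch of building such a reproducible RNG from a value all nodes already agree on (say, a slot number) could look like this:

```rust
use rand::SeedableRng;
use rand_chacha::ChaChaRng;

/// Hypothetical helper (the actual seed source is not shown in this diff):
/// derive a reproducible RNG from a value every node already agrees on,
/// e.g. a slot number, so all nodes compute the same peer shuffle.
fn deterministic_rng(shared_value: u64) -> ChaChaRng {
    let mut seed = [0u8; 32];
    seed[..8].copy_from_slice(&shared_value.to_le_bytes());
    ChaChaRng::from_seed(seed)
}

fn main() {
    let _rng = deterministic_rng(1234);
}
```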

@@ -24,9 +24,12 @@ use crate::repair_service::RepairType;
 use crate::result::Result;
 use crate::staking_utils;
 use crate::streamer::{BlobReceiver, BlobSender};
+use crate::weighted_shuffle::weighted_shuffle;
 use bincode::{deserialize, serialize};
+use core::cmp;
 use itertools::Itertools;
 use rand::{thread_rng, Rng};
+use rand_chacha::ChaChaRng;
 use rayon::prelude::*;
 use solana_metrics::{datapoint_debug, inc_new_counter_debug, inc_new_counter_error};
 use solana_netutil::{
@@ -489,38 +492,48 @@ impl ClusterInfo {
         && !ContactInfo::is_valid_address(&contact_info.tpu)
     }
-    fn sort_by_stake<S: std::hash::BuildHasher>(
+    fn stake_weighted_shuffle<S: std::hash::BuildHasher>(
         peers: &[ContactInfo],
         stakes: Option<&HashMap<Pubkey, u64, S>>,
+        rng: ChaChaRng,
     ) -> Vec<(u64, ContactInfo)> {
-        let mut peers_with_stakes: Vec<_> = peers
+        let (stake_weights, peers_with_stakes): (Vec<_>, Vec<_>) = peers
             .iter()
             .map(|c| {
-                (
-                    stakes.map_or(0, |stakes| *stakes.get(&c.id).unwrap_or(&0)),
-                    c.clone(),
-                )
+                let stake = stakes.map_or(0, |stakes| *stakes.get(&c.id).unwrap_or(&0));
+                // For the stake-weighted shuffle a valid weight is at least 1. Weight 0 is
+                // assumed to be a missing entry, so make sure stake weights are at least 1.
+                (cmp::max(1, stake), (stake, c.clone()))
             })
+            .sorted_by(|(_, (l_stake, l_info)), (_, (r_stake, r_info))| {
+                if r_stake == l_stake {
+                    r_info.id.cmp(&l_info.id)
+                } else {
+                    r_stake.cmp(&l_stake)
+                }
+            })
+            .unzip();
+        let shuffle = weighted_shuffle(stake_weights, rng);
+        let mut out: Vec<(u64, ContactInfo)> = shuffle
+            .iter()
+            .map(|x| peers_with_stakes[*x].clone())
             .collect();
-        peers_with_stakes.sort_unstable_by(|(l_stake, l_info), (r_stake, r_info)| {
-            if r_stake == l_stake {
-                r_info.id.cmp(&l_info.id)
-            } else {
-                r_stake.cmp(&l_stake)
-            }
-        });
-        peers_with_stakes.dedup();
-        peers_with_stakes
+        out.dedup();
+        out
     }
     /// Return sorted Retransmit peers and index of `Self.id()` as if it were in that list
-    fn sorted_peers_and_index<S: std::hash::BuildHasher>(
+    fn shuffle_peers_and_index<S: std::hash::BuildHasher>(
         &self,
         stakes: Option<&HashMap<Pubkey, u64, S>>,
+        rng: ChaChaRng,
     ) -> (usize, Vec<ContactInfo>) {
         let mut peers = self.retransmit_peers();
         peers.push(self.lookup(&self.id()).unwrap().clone());
-        let contacts_and_stakes: Vec<_> = ClusterInfo::sort_by_stake(&peers, stakes);
+        let contacts_and_stakes: Vec<_> = ClusterInfo::stake_weighted_shuffle(&peers, stakes, rng);
         let mut index = 0;
         let peers: Vec<_> = contacts_and_stakes
             .into_iter()
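The new `stake_weighted_shuffle` clamps every weight to at least 1 (the shuffle treats weight 0 as a missing entry), pre-sorts by (stake, id) so every node presumably starts from the same input ordering regardless of gossip-table iteration order, and then asks `weighted_shuffle` for a permutation of indices. A minimal standalone sketch of that index-shuffle idea (sampling without replacement, proportional to weight; `weighted_index_shuffle` is a hypothetical name, not the crate's `weighted_shuffle` implementation) could look like this:

```rust
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaChaRng;
use std::cmp;

/// Return a permutation of indices in which higher-weighted entries tend to
/// come first: repeatedly sample one remaining index with probability
/// proportional to its weight (sampling without replacement).
fn weighted_index_shuffle(weights: &[u64], rng: &mut ChaChaRng) -> Vec<usize> {
    let mut remaining: Vec<usize> = (0..weights.len()).collect();
    let mut order = Vec::with_capacity(weights.len());
    while !remaining.is_empty() {
        let total: u64 = remaining.iter().map(|&i| weights[i]).sum();
        // `% total` has a tiny modulo bias; acceptable for a sketch.
        let mut pick = rng.next_u64() % total;
        let pos = remaining
            .iter()
            .position(|&i| {
                if pick < weights[i] {
                    true
                } else {
                    pick -= weights[i];
                    false
                }
            })
            .expect("pick is always below the sum of remaining weights");
        order.push(remaining.remove(pos));
    }
    order
}

fn main() {
    let stakes = [0u64, 100, 5, 0];
    // Mirror the diff's cmp::max(1, stake): zero-stake peers still get weight 1.
    let weights: Vec<u64> = stakes.iter().map(|&s| cmp::max(1, s)).collect();
    let mut rng = ChaChaRng::from_seed([42u8; 32]);
    println!("peer visit order: {:?}", weighted_index_shuffle(&weights, &mut rng));
}
```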
@@ -537,9 +550,13 @@ impl ClusterInfo {
         (index, peers)
     }
-    pub fn sorted_tvu_peers(&self, stakes: Option<&HashMap<Pubkey, u64>>) -> Vec<ContactInfo> {
+    pub fn sorted_tvu_peers(
+        &self,
+        stakes: Option<&HashMap<Pubkey, u64>>,
+        rng: ChaChaRng,
+    ) -> Vec<ContactInfo> {
         let peers = self.tvu_peers();
-        let peers_with_stakes: Vec<_> = ClusterInfo::sort_by_stake(&peers, stakes);
+        let peers_with_stakes: Vec<_> = ClusterInfo::stake_weighted_shuffle(&peers, stakes, rng);
         peers_with_stakes
             .iter()
             .map(|(_, peer)| (*peer).clone())
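`sorted_tvu_peers` and `shuffle_peers_and_index` now take the `ChaChaRng` by value, which is what makes the shuffle reproducible: ChaCha is a deterministic stream RNG, so equal seeds yield equal permutations. A tiny check of that property, independent of the cluster code:

```rust
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaChaRng;

fn main() {
    let seed = [7u8; 32];
    let mut a = ChaChaRng::from_seed(seed);
    let mut b = ChaChaRng::from_seed(seed);
    // Equal seeds produce equal output streams, so two nodes that seed the
    // RNG the same way shuffle their peer lists identically.
    for _ in 0..4 {
        assert_eq!(a.next_u64(), b.next_u64());
    }
    println!("identical seeds -> identical shuffles");
}
```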
@@ -1498,8 +1515,12 @@ pub fn compute_retransmit_peers<S: std::hash::BuildHasher>(
     stakes: Option<&HashMap<Pubkey, u64, S>>,
     cluster_info: &Arc<RwLock<ClusterInfo>>,
     fanout: usize,
+    rng: ChaChaRng,
 ) -> (Vec<ContactInfo>, Vec<ContactInfo>) {
-    let (my_index, peers) = cluster_info.read().unwrap().sorted_peers_and_index(stakes);
+    let (my_index, peers) = cluster_info
+        .read()
+        .unwrap()
+        .shuffle_peers_and_index(stakes, rng);
     //calc num_layers and num_neighborhoods using the total number of nodes
     let (num_layers, layer_indices) = ClusterInfo::describe_data_plane(peers.len(), fanout);
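`describe_data_plane` itself is untouched by (and outside) this diff. Purely to illustrate what a fanout-based layering of the shuffled peer list can look like, here is a hypothetical helper (not the crate's implementation) that computes cumulative layer boundaries where each layer is `fanout` times the size of the previous one:

```rust
/// Hypothetical illustration: split `num_nodes` into layers where the first
/// layer holds up to `fanout` nodes and each following layer is `fanout`
/// times larger, returning the cumulative index where each layer ends.
fn layer_boundaries(num_nodes: usize, fanout: usize) -> Vec<usize> {
    let mut boundaries = Vec::new();
    let mut layer_size = fanout;
    let mut covered = 0;
    while covered < num_nodes {
        covered = (covered + layer_size).min(num_nodes);
        boundaries.push(covered);
        layer_size = layer_size.saturating_mul(fanout);
    }
    boundaries
}

fn main() {
    // With fanout 3 and 20 nodes the layers end at indices 3, 12, and 20.
    assert_eq!(layer_boundaries(20, 3), vec![3, 12, 20]);
}
```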