Revert "Revert "Ledger-tool: only require ledger dir when necessary (backport #21575) (#21578)""

This reverts commit eae3166bdc.
Tyera Eulberg authored 2021-12-16 13:57:24 -07:00
committed by Tyera Eulberg
parent 704d05f52d
commit 9fff4aa8b8
3 changed files with 1535 additions and 1481 deletions

@@ -1,23 +1,26 @@
/// The `bigtable` subcommand
use clap::{
use {
crate::ledger_path::canonicalize_ledger_path,
clap::{
value_t, value_t_or_exit, values_t_or_exit, App, AppSettings, Arg, ArgMatches, SubCommand,
};
use solana_clap_utils::{
},
solana_clap_utils::{
input_parsers::pubkey_of,
input_validators::{is_slot, is_valid_pubkey},
};
use solana_cli_output::{
},
solana_cli_output::{
display::println_transaction, CliBlock, CliTransaction, CliTransactionConfirmation,
OutputFormat,
};
use solana_ledger::{blockstore::Blockstore, blockstore_db::AccessType};
use solana_sdk::{clock::Slot, pubkey::Pubkey, signature::Signature};
use solana_transaction_status::{ConfirmedBlock, EncodedTransaction, UiTransactionEncoding};
use std::{
},
solana_ledger::{blockstore::Blockstore, blockstore_db::AccessType},
solana_sdk::{clock::Slot, pubkey::Pubkey, signature::Signature},
solana_transaction_status::{ConfirmedBlock, EncodedTransaction, UiTransactionEncoding},
std::{
path::Path,
process::exit,
result::Result,
sync::{atomic::AtomicBool, Arc},
},
};
async fn upload(
@@ -426,8 +429,11 @@ pub fn bigtable_process_command(ledger_path: &Path, matches: &ArgMatches<'_>) {
let ending_slot = value_t!(arg_matches, "ending_slot", Slot).ok();
let allow_missing_metadata = arg_matches.is_present("allow_missing_metadata");
let force_reupload = arg_matches.is_present("force_reupload");
let blockstore =
crate::open_blockstore(ledger_path, AccessType::TryPrimaryThenSecondary, None);
let blockstore = crate::open_blockstore(
&canonicalize_ledger_path(ledger_path),
AccessType::TryPrimaryThenSecondary,
None,
);
runtime.block_on(upload(
blockstore,

@@ -0,0 +1,30 @@
use {
clap::{value_t, ArgMatches},
std::{
fs,
path::{Path, PathBuf},
process::exit,
},
};
pub fn parse_ledger_path(matches: &ArgMatches<'_>, name: &str) -> PathBuf {
PathBuf::from(value_t!(matches, name, String).unwrap_or_else(|_err| {
eprintln!(
"Error: Missing --ledger <DIR> argument.\n\n{}",
matches.usage()
);
exit(1);
}))
}
// Canonicalize ledger path to avoid issues with symlink creation
pub fn canonicalize_ledger_path(ledger_path: &Path) -> PathBuf {
fs::canonicalize(&ledger_path).unwrap_or_else(|err| {
eprintln!(
"Unable to access ledger path '{}': {}",
ledger_path.display(),
err
);
exit(1);
})
}
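
The new module above splits ledger-path handling into two steps: `parse_ledger_path` reads the `--ledger` argument without touching the filesystem, while `canonicalize_ledger_path` resolves the directory and exits if it does not exist. Below is a minimal standalone sketch of that call pattern; the two helpers are copied verbatim from the new module, while the clap setup and the `show-path`/`open` subcommands are hypothetical stand-ins for illustration only, not part of the commit.

// Sketch only: illustrates deferring canonicalization to the subcommands that
// actually open the ledger. Helpers copied from the new ledger_path module;
// the CLI skeleton and subcommand names are assumptions, not from the commit.
use {
    clap::{value_t, App, Arg, ArgMatches, SubCommand},
    std::{
        fs,
        path::{Path, PathBuf},
        process::exit,
    },
};

// Copied from the new module: read --ledger without touching the filesystem.
pub fn parse_ledger_path(matches: &ArgMatches<'_>, name: &str) -> PathBuf {
    PathBuf::from(value_t!(matches, name, String).unwrap_or_else(|_err| {
        eprintln!(
            "Error: Missing --ledger <DIR> argument.\n\n{}",
            matches.usage()
        );
        exit(1);
    }))
}

// Copied from the new module: resolve symlinks; exits if the directory is missing.
pub fn canonicalize_ledger_path(ledger_path: &Path) -> PathBuf {
    fs::canonicalize(&ledger_path).unwrap_or_else(|err| {
        eprintln!(
            "Unable to access ledger path '{}': {}",
            ledger_path.display(),
            err
        );
        exit(1);
    })
}

fn main() {
    let matches = App::new("ledger-path-demo")
        .arg(
            Arg::with_name("ledger_path")
                .long("ledger")
                .value_name("DIR")
                .takes_value(true),
        )
        .subcommand(SubCommand::with_name("show-path"))
        .subcommand(SubCommand::with_name("open"))
        .get_matches();

    // Cheap parse: succeeds even if the directory does not exist yet.
    let ledger_path = parse_ledger_path(&matches, "ledger_path");

    match matches.subcommand() {
        // Hypothetical subcommand that never opens the ledger, so the raw,
        // possibly nonexistent path is enough (this mirrors how the commit
        // keeps `bigtable` queries usable without a local ledger directory).
        ("show-path", Some(_)) => println!("{}", ledger_path.display()),
        // Hypothetical subcommand that does open the ledger: only here is the
        // path canonicalized, which requires the directory to exist.
        ("open", Some(_)) => {
            let ledger_path = canonicalize_ledger_path(&ledger_path);
            println!("would open blockstore at {}", ledger_path.display());
        }
        _ => eprintln!("{}", matches.usage()),
    }
}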

@@ -1,30 +1,31 @@
#![allow(clippy::integer_arithmetic)]
use clap::{
crate_description, crate_name, value_t, value_t_or_exit, values_t_or_exit, App, AppSettings,
Arg, ArgMatches, SubCommand,
};
use dashmap::DashMap;
use itertools::Itertools;
use log::*;
use regex::Regex;
use serde::Serialize;
use serde_json::json;
use solana_clap_utils::{
use {
clap::{
crate_description, crate_name, value_t, value_t_or_exit, values_t_or_exit, App,
AppSettings, Arg, ArgMatches, SubCommand,
},
dashmap::DashMap,
itertools::Itertools,
log::*,
regex::Regex,
serde::Serialize,
serde_json::json,
solana_clap_utils::{
input_parsers::{cluster_type_of, pubkey_of, pubkeys_of},
input_validators::{
is_parsable, is_pubkey, is_pubkey_or_keypair, is_slot, is_valid_percentage,
},
};
use solana_ledger::entry::Entry;
use solana_ledger::{
},
solana_ledger::entry::Entry,
solana_ledger::{
ancestor_iterator::AncestorIterator,
bank_forks_utils,
blockstore::{create_new_ledger, Blockstore, PurgeType},
blockstore_db::{self, AccessType, BlockstoreRecoveryMode, Column, Database},
blockstore_processor::ProcessOptions,
shred::Shred,
};
use solana_runtime::{
},
solana_runtime::{
bank::{Bank, RewardCalculationEvent},
bank_forks::{ArchiveFormat, BankForks, SnapshotConfig},
cost_model::CostModel,
@@ -32,8 +33,8 @@ use solana_runtime::{
hardened_unpack::{open_genesis_config, MAX_GENESIS_ARCHIVE_UNPACKED_SIZE},
snapshot_utils,
snapshot_utils::{SnapshotVersion, DEFAULT_MAX_SNAPSHOTS_TO_RETAIN},
};
use solana_sdk::{
},
solana_sdk::{
account::{AccountSharedData, ReadableAccount, WritableAccount},
account_utils::StateMut,
clock::{Epoch, Slot},
@@ -46,25 +47,28 @@ use solana_sdk::{
shred_version::compute_shred_version,
stake::{self, state::StakeState},
system_program,
};
use solana_stake_program::stake_state::{self, PointValue};
use solana_vote_program::{
},
solana_stake_program::stake_state::{self, PointValue},
solana_vote_program::{
self,
vote_state::{self, VoteState},
};
use std::{
},
std::{
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
ffi::OsStr,
fs::{self, File},
fs::File,
io::{self, stdout, BufRead, BufReader, Write},
path::{Path, PathBuf},
process::{exit, Command, Stdio},
str::FromStr,
sync::{Arc, RwLock},
},
};
mod bigtable;
use bigtable::*;
mod ledger_path;
use ledger_path::*;
#[derive(PartialEq)]
enum LedgerOutputMethod {
@@ -1498,25 +1502,7 @@ fn main() {
info!("{} {}", crate_name!(), solana_version::version!());
let ledger_path = PathBuf::from(value_t!(matches, "ledger_path", String).unwrap_or_else(
|_err| {
eprintln!(
"Error: Missing --ledger <DIR> argument.\n\n{}",
matches.usage()
);
exit(1);
},
));
// Canonicalize ledger path to avoid issues with symlink creation
let ledger_path = fs::canonicalize(&ledger_path).unwrap_or_else(|err| {
eprintln!(
"Unable to access ledger path '{}': {}",
ledger_path.display(),
err
);
exit(1);
});
let ledger_path = parse_ledger_path(&matches, "ledger_path");
let snapshot_archive_path = value_t!(matches, "snapshot_archive_path", String)
.ok()
@@ -1527,8 +1513,12 @@ fn main() {
.map(BlockstoreRecoveryMode::from);
let verbose_level = matches.occurrences_of("verbose");
if let ("bigtable", Some(arg_matches)) = matches.subcommand() {
bigtable_process_command(&ledger_path, arg_matches)
} else {
let ledger_path = canonicalize_ledger_path(&ledger_path);
match matches.subcommand() {
("bigtable", Some(arg_matches)) => bigtable_process_command(&ledger_path, arg_matches),
("print", Some(arg_matches)) => {
let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot);
let ending_slot = value_t!(arg_matches, "ending_slot", Slot).unwrap_or(Slot::MAX);
@@ -1554,7 +1544,8 @@ fn main() {
let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot);
let ending_slot = value_t_or_exit!(arg_matches, "ending_slot", Slot);
let target_db = PathBuf::from(value_t_or_exit!(arg_matches, "target_db", String));
let source = open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
let source =
open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
let target = open_blockstore(&target_db, AccessType::PrimaryOnly, None);
for (slot, _meta) in source.slot_meta_iterator(starting_slot).unwrap() {
if slot > ending_slot {
@@ -1578,7 +1569,8 @@ fn main() {
}
("modify-genesis", Some(arg_matches)) => {
let mut genesis_config = open_genesis_config_by(&ledger_path, arg_matches);
let output_directory = PathBuf::from(arg_matches.value_of("output_directory").unwrap());
let output_directory =
PathBuf::from(arg_matches.value_of("output_directory").unwrap());
if let Some(cluster_type) = cluster_type_of(arg_matches, "cluster_type") {
genesis_config.cluster_type = cluster_type;
@@ -1654,7 +1646,8 @@ fn main() {
}
let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot);
let ending_slot = value_t!(arg_matches, "ending_slot", Slot).unwrap_or(Slot::MAX);
let ledger = open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
let ledger =
open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
for (slot, _meta) in ledger
.slot_meta_iterator(starting_slot)
.unwrap()
@@ -1844,7 +1837,9 @@ fn main() {
if ancestors.contains(&slot) && !map.contains_key(&slot) {
map.insert(slot, line);
}
if slot == ending_slot && frozen.contains_key(&slot) && full.contains_key(&slot)
if slot == ending_slot
&& frozen.contains_key(&slot)
&& full.contains_key(&slot)
{
break;
}
@@ -1928,7 +1923,8 @@ fn main() {
snapshot_archive_path,
) {
Ok((bank_forks, _leader_schedule_cache, _snapshot_hash)) => {
let dot = graph_forks(&bank_forks, arg_matches.is_present("include_all_votes"));
let dot =
graph_forks(&bank_forks, arg_matches.is_present("include_all_votes"));
let extension = Path::new(&output_file).extension();
let result = if extension == Some(OsStr::new("pdf")) {
@@ -1987,15 +1983,15 @@ fn main() {
.unwrap_or_default()
.into_iter()
.collect();
let snapshot_version =
arg_matches
.value_of("snapshot_version")
.map_or(SnapshotVersion::default(), |s| {
let snapshot_version = arg_matches.value_of("snapshot_version").map_or(
SnapshotVersion::default(),
|s| {
s.parse::<SnapshotVersion>().unwrap_or_else(|e| {
eprintln!("Error: {}", e);
exit(1)
})
});
},
);
let maximum_snapshots_to_retain =
value_t_or_exit!(arg_matches, "maximum_snapshots_to_retain", usize);
@@ -2063,7 +2059,9 @@ fn main() {
child_bank.set_hashes_per_tick(match hashes_per_tick {
// Note: Unlike `solana-genesis`, "auto" is not supported here.
"sleep" => None,
_ => Some(value_t_or_exit!(arg_matches, "hashes_per_tick", u64)),
_ => {
Some(value_t_or_exit!(arg_matches, "hashes_per_tick", u64))
}
});
}
bank = Arc::new(child_bank);
@@ -2107,7 +2105,8 @@ fn main() {
.into_iter()
{
if let Ok(StakeState::Stake(meta, stake)) = account.state() {
if vote_accounts_to_destake.contains(&stake.delegation.voter_pubkey)
if vote_accounts_to_destake
.contains(&stake.delegation.voter_pubkey)
{
if verbose_level > 0 {
warn!(
@@ -2153,7 +2152,8 @@ fn main() {
let mut bootstrap_validator_pubkeys_iter =
bootstrap_validator_pubkeys.iter();
loop {
let identity_pubkey = match bootstrap_validator_pubkeys_iter.next() {
let identity_pubkey = match bootstrap_validator_pubkeys_iter.next()
{
None => break,
Some(identity_pubkey) => identity_pubkey,
};
@@ -2321,7 +2321,10 @@ fn main() {
println!(" - slot: {}", slot);
println!(" - rent_epoch: {}", account.rent_epoch());
if !exclude_account_data {
println!(" - data: '{}'", bs58::encode(account.data()).into_string());
println!(
" - data: '{}'",
bs58::encode(account.data()).into_string()
);
}
println!(" - data_len: {}", data_len);
}
@@ -2364,16 +2367,21 @@ fn main() {
println!("Recalculating capitalization");
let old_capitalization = bank.set_capitalization();
if old_capitalization == bank.capitalization() {
eprintln!("Capitalization was identical: {}", Sol(old_capitalization));
eprintln!(
"Capitalization was identical: {}",
Sol(old_capitalization)
);
}
}
if arg_matches.is_present("warp_epoch") {
let base_bank = bank;
let raw_warp_epoch = value_t!(arg_matches, "warp_epoch", String).unwrap();
let raw_warp_epoch =
value_t!(arg_matches, "warp_epoch", String).unwrap();
let warp_epoch = if raw_warp_epoch.starts_with('+') {
base_bank.epoch() + value_t!(arg_matches, "warp_epoch", Epoch).unwrap()
base_bank.epoch()
+ value_t!(arg_matches, "warp_epoch", Epoch).unwrap()
} else {
value_t!(arg_matches, "warp_epoch", Epoch).unwrap()
};
@@ -2679,7 +2687,10 @@ fn main() {
for point_detail in point_details {
let record = InflationRecord {
cluster_type: format!("{:?}", base_bank.cluster_type()),
cluster_type: format!(
"{:?}",
base_bank.cluster_type()
),
rewarded_epoch: base_bank.epoch(),
account: format!("{}", pubkey),
owner: format!("{}", base_account.owner()),
@@ -2710,7 +2721,9 @@ fn main() {
deactivation_epoch: format_or_na(
detail.and_then(|d| d.deactivation_epoch),
),
earned_epochs: format_or_na(detail.map(|d| d.epochs)),
earned_epochs: format_or_na(
detail.map(|d| d.epochs),
),
epoch: format_or_na(point_detail.map(|d| d.epoch)),
epoch_credits: format_or_na(
point_detail.map(|d| d.credits),
@@ -2736,7 +2749,9 @@ fn main() {
vote_rewards: format_or_na(
detail.map(|d| d.vote_rewards),
),
commission: format_or_na(detail.map(|d| d.commission)),
commission: format_or_na(
detail.map(|d| d.commission),
),
cluster_rewards: format_or_na(
last_point_value
.read()
@@ -2767,7 +2782,9 @@ fn main() {
}
} else {
if arg_matches.is_present("recalculate_capitalization") {
eprintln!("Capitalization isn't verified because it's recalculated");
eprintln!(
"Capitalization isn't verified because it's recalculated"
);
}
if arg_matches.is_present("inflation") {
eprintln!(
@@ -2963,8 +2980,8 @@ fn main() {
);
exit(1);
}
let ancestor_iterator =
AncestorIterator::new(start_root, &blockstore).take_while(|&slot| slot >= end_root);
let ancestor_iterator = AncestorIterator::new(start_root, &blockstore)
.take_while(|&slot| slot >= end_root);
let roots_to_fix: Vec<_> = ancestor_iterator
.filter(|slot| !blockstore.is_root(*slot))
.collect();
@@ -3085,4 +3102,5 @@ fn main() {
}
_ => unreachable!(),
};
}
}