Ledger-tool: only require ledger dir when necessary (#21575)
* Don't canonicalize ledger_path unless ledger_path will be used
* Single use statement
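In outline: `--ledger` is still parsed up front, but `fs::canonicalize` (which fails if the directory does not exist) is deferred to the subcommands that actually open the local ledger, so `bigtable` queries no longer require a ledger directory. A condensed sketch of the new flow in `main()`, simplified from the diff below (the comments are explanatory and not part of the patch):

    let ledger_path = parse_ledger_path(&matches, "ledger_path");

    if let ("bigtable", Some(arg_matches)) = matches.subcommand() {
        // bigtable subcommands mostly query the BigTable instance; the one that
        // does read the local ledger (`upload`) canonicalizes the path itself.
        bigtable_process_command(&ledger_path, arg_matches)
    } else {
        // Every other subcommand opens the local ledger, so resolve the real
        // directory now; this exits with an error if it cannot be accessed.
        let ledger_path = canonicalize_ledger_path(&ledger_path);

        // ... existing subcommand dispatch, unchanged except for indentation ...
    }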
ledger-tool/src/bigtable.rs
@@ -1,23 +1,26 @@
 /// The `bigtable` subcommand
-use clap::{
+use {
+    crate::ledger_path::canonicalize_ledger_path,
+    clap::{
         value_t, value_t_or_exit, values_t_or_exit, App, AppSettings, Arg, ArgMatches, SubCommand,
-};
-use solana_clap_utils::{
+    },
+    solana_clap_utils::{
         input_parsers::pubkey_of,
         input_validators::{is_slot, is_valid_pubkey},
-};
-use solana_cli_output::{
+    },
+    solana_cli_output::{
         display::println_transaction, CliBlock, CliTransaction, CliTransactionConfirmation,
         OutputFormat,
-};
-use solana_ledger::{blockstore::Blockstore, blockstore_db::AccessType};
-use solana_sdk::{clock::Slot, pubkey::Pubkey, signature::Signature};
-use solana_transaction_status::{ConfirmedBlock, EncodedTransaction, UiTransactionEncoding};
-use std::{
+    },
+    solana_ledger::{blockstore::Blockstore, blockstore_db::AccessType},
+    solana_sdk::{clock::Slot, pubkey::Pubkey, signature::Signature},
+    solana_transaction_status::{ConfirmedBlock, EncodedTransaction, UiTransactionEncoding},
+    std::{
         path::Path,
         process::exit,
         result::Result,
         sync::{atomic::AtomicBool, Arc},
+    },
 };
 
 async fn upload(
@@ -426,8 +429,11 @@ pub fn bigtable_process_command(ledger_path: &Path, matches: &ArgMatches<'_>) {
             let ending_slot = value_t!(arg_matches, "ending_slot", Slot).ok();
             let allow_missing_metadata = arg_matches.is_present("allow_missing_metadata");
             let force_reupload = arg_matches.is_present("force_reupload");
-            let blockstore =
-                crate::open_blockstore(ledger_path, AccessType::TryPrimaryThenSecondary, None);
+            let blockstore = crate::open_blockstore(
+                &canonicalize_ledger_path(ledger_path),
+                AccessType::TryPrimaryThenSecondary,
+                None,
+            );
 
             runtime.block_on(upload(
                 blockstore,
ledger-tool/src/ledger_path.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
+use {
+    clap::{value_t, ArgMatches},
+    std::{
+        fs,
+        path::{Path, PathBuf},
+        process::exit,
+    },
+};
+
+pub fn parse_ledger_path(matches: &ArgMatches<'_>, name: &str) -> PathBuf {
+    PathBuf::from(value_t!(matches, name, String).unwrap_or_else(|_err| {
+        eprintln!(
+            "Error: Missing --ledger <DIR> argument.\n\n{}",
+            matches.usage()
+        );
+        exit(1);
+    }))
+}
+
+// Canonicalize ledger path to avoid issues with symlink creation
+pub fn canonicalize_ledger_path(ledger_path: &Path) -> PathBuf {
+    fs::canonicalize(&ledger_path).unwrap_or_else(|err| {
+        eprintln!(
+            "Unable to access ledger path '{}': {}",
+            ledger_path.display(),
+            err
+        );
+        exit(1);
+    })
+}
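For context, a minimal sketch of how the two helpers are intended to be used together, mirroring the `main.rs` change below. The clap `App` here (binary name, `--ledger` flag wiring) is illustrative only and not part of this commit:

    use clap::{App, Arg};
    // Assumes the module is wired up as in main.rs: `mod ledger_path; use ledger_path::*;`

    fn main() {
        // Illustrative CLI definition; the real ledger-tool App defines many more args.
        let matches = App::new("ledger-tool-example")
            .arg(
                Arg::with_name("ledger_path")
                    .long("ledger")
                    .value_name("DIR")
                    .takes_value(true),
            )
            .get_matches();

        // Parsing the argument never touches the filesystem...
        let ledger_path = parse_ledger_path(&matches, "ledger_path");
        // ...while canonicalizing does, and exits if the directory is inaccessible,
        // so it is only done when the ledger will actually be opened.
        let ledger_path = canonicalize_ledger_path(&ledger_path);
        println!("ledger dir: {}", ledger_path.display());
    }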
ledger-tool/src/main.rs
@@ -1,32 +1,33 @@
 #![allow(clippy::integer_arithmetic)]
-use clap::{
-    crate_description, crate_name, value_t, value_t_or_exit, values_t_or_exit, App, AppSettings,
-    Arg, ArgMatches, SubCommand,
-};
-use dashmap::DashMap;
-use itertools::Itertools;
-use log::*;
-use regex::Regex;
-use serde::Serialize;
-use serde_json::json;
-use solana_clap_utils::{
+use {
+    clap::{
+        crate_description, crate_name, value_t, value_t_or_exit, values_t_or_exit, App,
+        AppSettings, Arg, ArgMatches, SubCommand,
+    },
+    dashmap::DashMap,
+    itertools::Itertools,
+    log::*,
+    regex::Regex,
+    serde::Serialize,
+    serde_json::json,
+    solana_clap_utils::{
         input_parsers::{cluster_type_of, pubkey_of, pubkeys_of},
         input_validators::{
             is_parsable, is_pow2, is_pubkey, is_pubkey_or_keypair, is_slot, is_valid_percentage,
         },
-};
-use solana_core::system_monitor_service::SystemMonitorService;
-use solana_entry::entry::Entry;
-use solana_ledger::{
+    },
+    solana_core::system_monitor_service::SystemMonitorService,
+    solana_entry::entry::Entry,
+    solana_ledger::{
         ancestor_iterator::AncestorIterator,
         bank_forks_utils,
         blockstore::{create_new_ledger, Blockstore, PurgeType},
         blockstore_db::{self, AccessType, BlockstoreRecoveryMode, Column, Database},
         blockstore_processor::ProcessOptions,
         shred::Shred,
-};
-use solana_measure::measure::Measure;
-use solana_runtime::{
+    },
+    solana_measure::measure::Measure,
+    solana_runtime::{
         accounts_db::AccountsDbConfig,
         accounts_index::{AccountsIndexConfig, ScanConfig},
         bank::{Bank, RewardCalculationEvent},
@@ -40,8 +41,8 @@ use solana_runtime::{
             self, ArchiveFormat, SnapshotVersion, DEFAULT_MAX_FULL_SNAPSHOT_ARCHIVES_TO_RETAIN,
             DEFAULT_MAX_INCREMENTAL_SNAPSHOT_ARCHIVES_TO_RETAIN,
         },
-};
-use solana_sdk::{
+    },
+    solana_sdk::{
         account::{AccountSharedData, ReadableAccount, WritableAccount},
         account_utils::StateMut,
         clock::{Epoch, Slot},
@@ -55,16 +56,16 @@ use solana_sdk::{
         stake::{self, state::StakeState},
         system_program,
         transaction::{SanitizedTransaction, TransactionError},
-};
-use solana_stake_program::stake_state::{self, PointValue};
-use solana_vote_program::{
+    },
+    solana_stake_program::stake_state::{self, PointValue},
+    solana_vote_program::{
         self,
         vote_state::{self, VoteState},
-};
-use std::{
+    },
+    std::{
         collections::{BTreeMap, BTreeSet, HashMap, HashSet},
         ffi::OsStr,
-    fs::{self, File},
+        fs::File,
         io::{self, stdout, BufRead, BufReader, Write},
         path::{Path, PathBuf},
         process::{exit, Command, Stdio},
@@ -74,10 +75,13 @@ use std::{
             mpsc::channel,
             Arc, RwLock,
         },
+    },
 };
 
 mod bigtable;
 use bigtable::*;
+mod ledger_path;
+use ledger_path::*;
 
 #[derive(PartialEq)]
 enum LedgerOutputMethod {
@@ -1625,25 +1629,7 @@ fn main() {
 
     info!("{} {}", crate_name!(), solana_version::version!());
 
-    let ledger_path = PathBuf::from(value_t!(matches, "ledger_path", String).unwrap_or_else(
-        |_err| {
-            eprintln!(
-                "Error: Missing --ledger <DIR> argument.\n\n{}",
-                matches.usage()
-            );
-            exit(1);
-        },
-    ));
-
-    // Canonicalize ledger path to avoid issues with symlink creation
-    let ledger_path = fs::canonicalize(&ledger_path).unwrap_or_else(|err| {
-        eprintln!(
-            "Unable to access ledger path '{}': {}",
-            ledger_path.display(),
-            err
-        );
-        exit(1);
-    });
+    let ledger_path = parse_ledger_path(&matches, "ledger_path");
 
     let snapshot_archive_path = value_t!(matches, "snapshot_archive_path", String)
         .ok()
@@ -1654,8 +1640,12 @@ fn main() {
         .map(BlockstoreRecoveryMode::from);
     let verbose_level = matches.occurrences_of("verbose");
 
+    if let ("bigtable", Some(arg_matches)) = matches.subcommand() {
+        bigtable_process_command(&ledger_path, arg_matches)
+    } else {
+        let ledger_path = canonicalize_ledger_path(&ledger_path);
+
     match matches.subcommand() {
-        ("bigtable", Some(arg_matches)) => bigtable_process_command(&ledger_path, arg_matches),
         ("print", Some(arg_matches)) => {
             let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot);
             let ending_slot = value_t!(arg_matches, "ending_slot", Slot).unwrap_or(Slot::MAX);
@@ -1681,7 +1671,8 @@ fn main() {
             let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot);
             let ending_slot = value_t_or_exit!(arg_matches, "ending_slot", Slot);
             let target_db = PathBuf::from(value_t_or_exit!(arg_matches, "target_db", String));
-            let source = open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
+            let source =
+                open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
             let target = open_blockstore(&target_db, AccessType::PrimaryOnly, None);
             for (slot, _meta) in source.slot_meta_iterator(starting_slot).unwrap() {
                 if slot > ending_slot {
@@ -1705,7 +1696,8 @@ fn main() {
         }
         ("modify-genesis", Some(arg_matches)) => {
             let mut genesis_config = open_genesis_config_by(&ledger_path, arg_matches);
-            let output_directory = PathBuf::from(arg_matches.value_of("output_directory").unwrap());
+            let output_directory =
+                PathBuf::from(arg_matches.value_of("output_directory").unwrap());
 
             if let Some(cluster_type) = cluster_type_of(arg_matches, "cluster_type") {
                 genesis_config.cluster_type = cluster_type;
@@ -1782,7 +1774,8 @@ fn main() {
             }
             let starting_slot = value_t_or_exit!(arg_matches, "starting_slot", Slot);
             let ending_slot = value_t!(arg_matches, "ending_slot", Slot).unwrap_or(Slot::MAX);
-            let ledger = open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
+            let ledger =
+                open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);
             for (slot, _meta) in ledger
                 .slot_meta_iterator(starting_slot)
                 .unwrap()
@@ -1973,7 +1966,9 @@ fn main() {
                 if ancestors.contains(&slot) && !map.contains_key(&slot) {
                     map.insert(slot, line);
                 }
-                if slot == ending_slot && frozen.contains_key(&slot) && full.contains_key(&slot)
+                if slot == ending_slot
+                    && frozen.contains_key(&slot)
+                    && full.contains_key(&slot)
                 {
                     break;
                 }
@@ -1995,9 +1990,11 @@ fn main() {
             }
 
             let exit_signal = Arc::new(AtomicBool::new(false));
-            let system_monitor_service = SystemMonitorService::new(Arc::clone(&exit_signal), false);
+            let system_monitor_service =
+                SystemMonitorService::new(Arc::clone(&exit_signal), false);
 
-            if let Some(limit) = value_t!(arg_matches, "accounts_index_memory_limit_mb", usize).ok()
+            if let Some(limit) =
+                value_t!(arg_matches, "accounts_index_memory_limit_mb", usize).ok()
             {
                 accounts_index_config.index_limit_mb = Some(limit);
             }
@@ -2018,7 +2015,8 @@ fn main() {
                 accounts_index_config.drives = Some(accounts_index_paths);
             }
 
-            let filler_account_count = value_t!(arg_matches, "accounts_filler_count", usize).ok();
+            let filler_account_count =
+                value_t!(arg_matches, "accounts_filler_count", usize).ok();
 
             let accounts_db_config = Some(AccountsDbConfig {
                 index: Some(accounts_index_config),
@@ -2100,7 +2098,8 @@ fn main() {
                 snapshot_archive_path,
             ) {
                 Ok((bank_forks, ..)) => {
-                    let dot = graph_forks(&bank_forks, arg_matches.is_present("include_all_votes"));
+                    let dot =
+                        graph_forks(&bank_forks, arg_matches.is_present("include_all_votes"));
 
                     let extension = Path::new(&output_file).extension();
                     let result = if extension == Some(OsStr::new("pdf")) {
@@ -2159,15 +2158,15 @@ fn main() {
                 .unwrap_or_default()
                 .into_iter()
                 .collect();
-            let snapshot_version =
-                arg_matches
-                    .value_of("snapshot_version")
-                    .map_or(SnapshotVersion::default(), |s| {
+            let snapshot_version = arg_matches.value_of("snapshot_version").map_or(
+                SnapshotVersion::default(),
+                |s| {
                     s.parse::<SnapshotVersion>().unwrap_or_else(|e| {
                         eprintln!("Error: {}", e);
                         exit(1)
                     })
-                    });
+                },
+            );
 
             let maximum_full_snapshot_archives_to_retain =
                 value_t_or_exit!(arg_matches, "maximum_full_snapshots_to_retain", usize);
@@ -2242,7 +2241,9 @@ fn main() {
                     child_bank.set_hashes_per_tick(match hashes_per_tick {
                         // Note: Unlike `solana-genesis`, "auto" is not supported here.
                         "sleep" => None,
-                        _ => Some(value_t_or_exit!(arg_matches, "hashes_per_tick", u64)),
+                        _ => {
+                            Some(value_t_or_exit!(arg_matches, "hashes_per_tick", u64))
+                        }
                     });
                 }
                 bank = Arc::new(child_bank);
@@ -2286,7 +2287,8 @@ fn main() {
                     .into_iter()
                 {
                     if let Ok(StakeState::Stake(meta, stake)) = account.state() {
-                        if vote_accounts_to_destake.contains(&stake.delegation.voter_pubkey)
+                        if vote_accounts_to_destake
+                            .contains(&stake.delegation.voter_pubkey)
                         {
                             if verbose_level > 0 {
                                 warn!(
@@ -2335,7 +2337,8 @@ fn main() {
                     let mut bootstrap_validator_pubkeys_iter =
                         bootstrap_validator_pubkeys.iter();
                     loop {
-                        let identity_pubkey = match bootstrap_validator_pubkeys_iter.next() {
+                        let identity_pubkey = match bootstrap_validator_pubkeys_iter.next()
+                        {
                             None => break,
                             Some(identity_pubkey) => identity_pubkey,
                         };
@@ -2600,16 +2603,21 @@ fn main() {
                     println!("Recalculating capitalization");
                     let old_capitalization = bank.set_capitalization();
                     if old_capitalization == bank.capitalization() {
-                        eprintln!("Capitalization was identical: {}", Sol(old_capitalization));
+                        eprintln!(
+                            "Capitalization was identical: {}",
+                            Sol(old_capitalization)
+                        );
                     }
                 }
 
                 if arg_matches.is_present("warp_epoch") {
                     let base_bank = bank;
 
-                    let raw_warp_epoch = value_t!(arg_matches, "warp_epoch", String).unwrap();
+                    let raw_warp_epoch =
+                        value_t!(arg_matches, "warp_epoch", String).unwrap();
                     let warp_epoch = if raw_warp_epoch.starts_with('+') {
-                        base_bank.epoch() + value_t!(arg_matches, "warp_epoch", Epoch).unwrap()
+                        base_bank.epoch()
+                            + value_t!(arg_matches, "warp_epoch", Epoch).unwrap()
                     } else {
                         value_t!(arg_matches, "warp_epoch", Epoch).unwrap()
                     };
@@ -2915,7 +2923,10 @@ fn main() {
 
                         for point_detail in point_details {
                             let record = InflationRecord {
-                                cluster_type: format!("{:?}", base_bank.cluster_type()),
+                                cluster_type: format!(
+                                    "{:?}",
+                                    base_bank.cluster_type()
+                                ),
                                 rewarded_epoch: base_bank.epoch(),
                                 account: format!("{}", pubkey),
                                 owner: format!("{}", base_account.owner()),
@@ -2946,7 +2957,9 @@ fn main() {
                                 deactivation_epoch: format_or_na(
                                     detail.and_then(|d| d.deactivation_epoch),
                                 ),
-                                earned_epochs: format_or_na(detail.map(|d| d.epochs)),
+                                earned_epochs: format_or_na(
+                                    detail.map(|d| d.epochs),
+                                ),
                                 epoch: format_or_na(point_detail.map(|d| d.epoch)),
                                 epoch_credits: format_or_na(
                                     point_detail.map(|d| d.credits),
@@ -2972,7 +2985,9 @@ fn main() {
                                 vote_rewards: format_or_na(
                                     detail.map(|d| d.vote_rewards),
                                 ),
-                                commission: format_or_na(detail.map(|d| d.commission)),
+                                commission: format_or_na(
+                                    detail.map(|d| d.commission),
+                                ),
                                 cluster_rewards: format_or_na(
                                     last_point_value
                                         .read()
@@ -3003,7 +3018,9 @@ fn main() {
                     }
                 } else {
                     if arg_matches.is_present("recalculate_capitalization") {
-                        eprintln!("Capitalization isn't verified because it's recalculated");
+                        eprintln!(
+                            "Capitalization isn't verified because it's recalculated"
+                        );
                     }
                     if arg_matches.is_present("inflation") {
                         eprintln!(
@@ -3199,8 +3216,8 @@ fn main() {
                 );
                 exit(1);
             }
-            let ancestor_iterator =
-                AncestorIterator::new(start_root, &blockstore).take_while(|&slot| slot >= end_root);
+            let ancestor_iterator = AncestorIterator::new(start_root, &blockstore)
+                .take_while(|&slot| slot >= end_root);
             let roots_to_fix: Vec<_> = ancestor_iterator
                 .filter(|slot| !blockstore.is_root(*slot))
                 .collect();
@@ -3321,4 +3338,5 @@ fn main() {
             }
             _ => unreachable!(),
         };
+    }
 }