Compare commits: dependabot...v1.10.2
178 Commits
Commit SHA1s (178):
11be3fb7fa e8a8f4e9e2 accc64ebcf bc9882a78b 01b6f97f0b 7758c32035
bf57252298 0d369616e7 48d4c8eb5c e2e54ef64b 1e20bd8f9a b1da7cff66
4e02ec342c 3cf31fa9b8 ccff006948 3eca66a1f8 4d08234603 5d75ef4766
7ee7fc6f58 d20dd21600 2bff36dfba 4f18d73281 043086081f 3d021cffa3
9b591286d7 99f1a22b9d e6a67cd091 631be1ffdd 021135978d b444836a97
83f5f8bfc3 ddd9d5a5a5 ead8cc4366 7b238b3645 35d1235ed0 3c6840050c
58c0db9704 37189f20c5 588414a776 72af687aa6 f68c5a274d 9f71958d7d
17b00ad3a4 1fe0d6eeeb e60c9b97c9 ea1bcd3d59 1eddb6d1e9 26ef6111bb
9bbccbe27c fb974489a5 ba54b30101 a1c45d5acb 176fd23002 3688ac4eae
cc55684f5f e2bc326d58 8abaa5d350 949006b5a2 afc41c7b11 7a9884c831
8a4b019ded 249d926d1b 65e2d9b2f2 5c722519cf 5a0cd05866 9acbfa5eb1
c878c9e2cb 0a17edcc1f cc4d75a16f b719d6a2ad 8438366d1b 46ec5d563b
9b80452c7c c2ec294401 536a99705b 00558227be 5599bd9442 9cfa21f7d1
12337d8daf 3114c199bd e790d0fc53 7933c7fc24 ddbf5c782f 38d8bbb19c
181fffb916 08c9a650db 3a0271c113 463cd564cf b8b7163b66 ba771cdc45
d2b23da9ea 09b58e1cfb e23c6ce62b 38db1dead4 36ad59673c 0d33b54d74
93c8e04d51 b28acd2d4d 360f6466a3 aad73f1f2e afda8c4020 62d2a4cd88
8d53ea81e9 f2fa49a771 7b7160448b 7f608965ef ddfd4f86f3 a99fd09c16
4b59bfe6d8 5ac3466f26 2e750722c7 a9fd807f61 011472a8e8 b4480e6b70
61d7bdd66f 43347f3da6 634f4eb37d 39387e8446 79a515e88e e87b941a51
fe7604589d e9912744ef 8184f755ae 97d40ba3da 82cb61dc36 1a99251498
41ab690a61 7dbde2247d da00d29de0 e88da2ec0a 26aa18b3f3 e630eb73d7
ef8b7d9c62 d909b7c80b 8eefe60c44 d43786edcf 9ec514f6c5 3ddd018452
41f78b9925 4f0070a5c6 8de88d0a55 86e2f728c3 7b7fdb42d9 c8cb940b4e
a4f4ac5279 d3ebe8d8f5 7d1a090cfb c69e3b73ff 2a17a661e6 a0d68ef60e
7d1810bbcc 6c56eb9663 d0ba914d2b 7943e8a1c3 3e48cc4e00 ce4d579499
f6a06826d8 93c5642f9f 1282277126 454e82683e 6b2683f7da ec798f5aad
3b5b71ce44 19448ba078 e3fa55f88d 5877e38baa 0de7b757d0 6dfd1b9883
6666f23c01 f0a235d16f 4eeb9f4648 f814c4a082 911c5a8362 22d2a40133
611d745241 ee3fc39f1c fe18ea35a2 30dafc7135
.github/PULL_REQUEST_TEMPLATE.md (4 changes, vendored)
@@ -1,5 +1,9 @@
#### Problem

#### Summary of Changes

Fixes #
.github/workflows/explorer_preview.yml (22 changes, vendored)
@@ -17,8 +17,7 @@ jobs:
vercel-token: ${{ secrets.VERCEL_TOKEN }} # Required
github-token: ${{ secrets.GITHUB_TOKEN }} #Optional
vercel-org-id: ${{ secrets.ORG_ID}} #Required
vercel-project-id: ${{ secrets.PROJECT_ID}} #Required
working-directory: ./explorer
vercel-project-id: ${{ secrets.PROJECT_ID}} #Required
scope: ${{ secrets.TEAM_ID }}

- name: vercel url
@@ -36,17 +35,24 @@ jobs:
#filtered_url=$(cat vercelfile2.txt )
#echo "$filtered_url" >> .env.preview1

- name: Run tests
- name: Fetching Vercel Preview Deployment Link
uses: mathiasvr/command-output@v1
id: tests2
id: test1
with:
run: |
echo "$(cat .env.preview1)"
- name: Fetching PR commit URL
uses: mathiasvr/command-output@v1
id: test2
with:
run: |
HEAD_SHA=$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/solana-labs/solana/pulls | jq .[0] | jq -r .head.sha)
USER_NAME=$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/solana-labs/solana/pulls | jq .[0] | jq -r .head.user.login)
echo "github.com/$USER_NAME/solana/commit/$HEAD_SHA"

- name: Slack Notification1
- name: Slack Notification4
uses: rtCamp/action-slack-notify@master
env:
SLACK_MESSAGE: ${{ steps.tests2.outputs.stdout }}
SLACK_TITLE: Vercel "Explorer" Preview Deployment Link
SLACK_MESSAGE: ' Vercel Link: ${{ steps.test1.outputs.stdout }} PR Commit: ${{steps.test2.outputs.stdout}}'
SLACK_TITLE: Vercel "Explorer" Preview Deployment Link , PR Commit
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
.github/workflows/explorer_production.yml (6 changes, vendored)
@@ -30,7 +30,8 @@ jobs:
runs-on: ubuntu-latest
defaults:
run:
working-directory: explorer
working-directory: explorer

steps:
- uses: actions/checkout@v2
with:
@@ -38,9 +39,8 @@ jobs:
- uses: amondnet/vercel-action@v20
with:
vercel-token: ${{ secrets.VERCEL_TOKEN }} # Required
github-token: ${{ secrets.GITHUB_TOKEN }} #Optional
github-token: ${{ secrets.PAT }} #Optional
vercel-args: '--prod' #for production
vercel-org-id: ${{ secrets.ORG_ID}} #Required
vercel-project-id: ${{ secrets.PROJECT_ID}} #Required
working-directory: ./explorer
scope: ${{ secrets.TEAM_ID }}
Cargo.lock (1445 changes, generated)
File diff suppressed because it is too large
Cargo.toml (46 changes)
@@ -1,29 +1,30 @@
[workspace]
members = [
"account-decoder",
"accounts-bench",
"accounts-cluster-bench",
"accountsdb-plugin-interface",
"accountsdb-plugin-manager",
"accounts-cluster-bench",
"bench-streamer",
"bench-tps",
"accounts-bench",
"banking-bench",
"banks-client",
"banks-interface",
"banks-server",
"bucket_map",
"bench-streamer",
"bench-tps",
"bloom",
"bucket_map",
"clap-utils",
"cli",
"cli-config",
"cli-output",
"client",
"client-test",
"core",
"dos",
"download-utils",
"entry",
"faucet",
"frozen-abi",
"perf",
"validator",
"genesis",
"genesis-utils",
"gossip",
@@ -32,32 +33,36 @@ members = [
"ledger",
"ledger-tool",
"local-cluster",
"logger",
"log-analyzer",
"logger",
"measure",
"merkle-root-bench",
"merkle-tree",
"storage-bigtable",
"storage-proto",
"streamer",
"measure",
"metrics",
"net-shaper",
"net-utils",
"notifier",
"perf",
"poh",
"poh-bench",
"program-test",
"programs/address-lookup-table",
"programs/address-lookup-table-tests",
"programs/ed25519-tests",
"programs/bpf_loader",
"programs/bpf_loader/gen-syscall-list",
"programs/compute-budget",
"programs/config",
"programs/ed25519-tests",
"programs/stake",
"programs/vote",
"programs/zk-token-proof",
"rayon-threadlimit",
"rbpf-cli",
"remote-wallet",
"replica-lib",
"replica-node",
"rpc",
"rpc-test",
"runtime",
"runtime/store-tool",
"sdk",
@@ -65,24 +70,19 @@ members = [
"sdk/cargo-test-bpf",
"send-transaction-service",
"stake-accounts",
"storage-bigtable",
"storage-proto",
"streamer",
"sys-tuner",
"test-validator",
"tokens",
"transaction-dos",
"transaction-status",
"account-decoder",
"upload-perf",
"net-utils",
"validator",
"version",
"cli",
"rayon-threadlimit",
"watchtower",
"replica-node",
"replica-lib",
"test-validator",
"rpc-test",
"client-test",
"zk-token-sdk",
"programs/zk-token-proof",
]

exclude = [
@@ -1,6 +1,6 @@
[package]
name = "solana-account-decoder"
version = "1.10.0"
version = "1.10.2"
description = "Solana account decoder"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
@@ -10,21 +10,21 @@ license = "Apache-2.0"
edition = "2021"

[dependencies]
base64 = "0.12.3"
Inflector = "0.11.4"
base64 = "0.13.0"
bincode = "1.3.3"
bs58 = "0.4.0"
bv = "0.11.1"
Inflector = "0.11.4"
lazy_static = "1.4.0"
serde = "1.0.136"
serde_derive = "1.0.103"
serde_json = "1.0.78"
solana-config-program = { path = "../programs/config", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-vote-program = { path = "../programs/vote", version = "=1.10.0" }
serde_json = "1.0.79"
solana-config-program = { path = "../programs/config", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-vote-program = { path = "../programs/vote", version = "=1.10.2" }
spl-token = { version = "=3.2.0", features = ["no-entrypoint"] }
thiserror = "1.0"
zstd = "0.10.0"
zstd = "0.11.0"

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -33,7 +33,7 @@ pub type StringDecimals = String;
pub const MAX_BASE58_BYTES: usize = 128;

/// A duplicate representation of an Account for pretty JSON serialization
#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiAccount {
pub lamports: u64,
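The only functional change in this hunk is adding PartialEq to UiAccount's derive list, which lets decoded accounts be compared structurally. A minimal sketch of the effect, using a stand-in struct rather than the real UiAccount (the field set here is illustrative only):

    // Deriving PartialEq generates field-by-field equality, so values can be
    // compared with == and used in assert_eq! without a hand-written impl.
    #[derive(Clone, Debug, PartialEq)]
    struct DemoUiAccount {
        lamports: u64,
        owner: String,
    }

    fn main() {
        let a = DemoUiAccount { lamports: 1, owner: "system".to_string() };
        let b = a.clone();
        assert_eq!(a, b); // would not compile without the PartialEq derive
        println!("equal: {}", a == b);
    }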
@@ -2,21 +2,21 @@
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-accounts-bench"
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
publish = false

[dependencies]
clap = "2.33.1"
log = "0.4.14"
rayon = "1.5.1"
solana-logger = { path = "../logger", version = "=1.10.0" }
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-measure = { path = "../measure", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-version = { path = "../version", version = "=1.10.0" }
clap = "2.33.1"
solana-logger = { path = "../logger", version = "=1.10.2" }
solana-measure = { path = "../measure", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-version = { path = "../version", version = "=1.10.2" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -2,7 +2,7 @@
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-accounts-cluster-bench"
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -13,25 +13,25 @@ clap = "2.33.1"
log = "0.4.14"
rand = "0.7.0"
rayon = "1.5.1"
solana-account-decoder = { path = "../account-decoder", version = "=1.10.0" }
solana-clap-utils = { path = "../clap-utils", version = "=1.10.0" }
solana-client = { path = "../client", version = "=1.10.0" }
solana-faucet = { path = "../faucet", version = "=1.10.0" }
solana-gossip = { path = "../gossip", version = "=1.10.0" }
solana-logger = { path = "../logger", version = "=1.10.0" }
solana-measure = { path = "../measure", version = "=1.10.0" }
solana-net-utils = { path = "../net-utils", version = "=1.10.0" }
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-streamer = { path = "../streamer", version = "=1.10.0" }
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
solana-version = { path = "../version", version = "=1.10.0" }
solana-account-decoder = { path = "../account-decoder", version = "=1.10.2" }
solana-clap-utils = { path = "../clap-utils", version = "=1.10.2" }
solana-client = { path = "../client", version = "=1.10.2" }
solana-faucet = { path = "../faucet", version = "=1.10.2" }
solana-gossip = { path = "../gossip", version = "=1.10.2" }
solana-logger = { path = "../logger", version = "=1.10.2" }
solana-measure = { path = "../measure", version = "=1.10.2" }
solana-net-utils = { path = "../net-utils", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-streamer = { path = "../streamer", version = "=1.10.2" }
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
solana-version = { path = "../version", version = "=1.10.2" }
spl-token = { version = "=3.2.0", features = ["no-entrypoint"] }

[dev-dependencies]
solana-core = { path = "../core", version = "=1.10.0" }
solana-local-cluster = { path = "../local-cluster", version = "=1.10.0" }
solana-test-validator = { path = "../test-validator", version = "=1.10.0" }
solana-core = { path = "../core", version = "=1.10.2" }
solana-local-cluster = { path = "../local-cluster", version = "=1.10.2" }
solana-test-validator = { path = "../test-validator", version = "=1.10.2" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -3,7 +3,7 @@ authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-accountsdb-plugin-interface"
description = "The Solana AccountsDb plugin interface."
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -11,9 +11,9 @@ documentation = "https://docs.rs/solana-accountsdb-plugin-interface"

[dependencies]
log = "0.4.11"
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
thiserror = "1.0.30"
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -9,9 +9,7 @@ use {
thiserror::Error,
};

impl Eq for ReplicaAccountInfo<'_> {}

#[derive(Clone, PartialEq, Debug)]
#[derive(Debug, Clone, PartialEq, Eq)]
/// Information about an account being updated
pub struct ReplicaAccountInfo<'a> {
/// The Pubkey for the account
@@ -112,7 +110,7 @@ pub enum AccountsDbPluginError {
}

/// The current status of a slot
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SlotStatus {
/// The highest slot of the heaviest fork processed by the node. Ledger state at this slot is
/// not derived from a confirmed or finalized block, but if multiple forks are present, is from
@@ -3,7 +3,7 @@ authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-accountsdb-plugin-manager"
description = "The Solana AccountsDb plugin manager."
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -15,14 +15,14 @@ crossbeam-channel = "0.5"
json5 = "0.4.1"
libloading = "0.7.3"
log = "0.4.11"
serde_json = "1.0.78"
solana-accountsdb-plugin-interface = { path = "../accountsdb-plugin-interface", version = "=1.10.0" }
solana-measure = { path = "../measure", version = "=1.10.0" }
solana-metrics = { path = "../metrics", version = "=1.10.0" }
solana-rpc = { path = "../rpc", version = "=1.10.0" }
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
serde_json = "1.0.79"
solana-accountsdb-plugin-interface = { path = "../accountsdb-plugin-interface", version = "=1.10.2" }
solana-measure = { path = "../measure", version = "=1.10.2" }
solana-metrics = { path = "../metrics", version = "=1.10.2" }
solana-rpc = { path = "../rpc", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
thiserror = "1.0.30"

[package.metadata.docs.rs]
@@ -52,7 +52,7 @@ impl SlotStatusNotifierImpl {

for plugin in plugin_manager.plugins.iter_mut() {
let mut measure = Measure::start("accountsdb-plugin-update-slot");
match plugin.update_slot_status(slot, parent, slot_status.clone()) {
match plugin.update_slot_status(slot, parent, slot_status) {
Err(err) => {
error!(
"Failed to update slot status at slot {}, error: {} to plugin {}",
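The .clone() disappears here because the interface hunk above makes SlotStatus derive Copy, so the status can be passed by value on every loop iteration. A minimal sketch of that pattern with a stand-in enum (not the real plugin interface):

    // A small, data-free enum can derive Copy; each call then receives an
    // implicit copy and the original binding stays usable afterwards.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum DemoSlotStatus {
        Processed,
        Confirmed,
        Rooted,
    }

    fn notify(status: DemoSlotStatus) {
        println!("notified plugin with {:?}", status);
    }

    fn main() {
        for status in [DemoSlotStatus::Processed, DemoSlotStatus::Confirmed, DemoSlotStatus::Rooted] {
            notify(status); // passed by value...
            notify(status); // ...and still usable, because the enum is Copy
        }
    }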
@@ -2,7 +2,7 @@
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-banking-bench"
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -14,17 +14,17 @@ crossbeam-channel = "0.5"
log = "0.4.14"
rand = "0.7.0"
rayon = "1.5.1"
solana-core = { path = "../core", version = "=1.10.0" }
solana-gossip = { path = "../gossip", version = "=1.10.0" }
solana-ledger = { path = "../ledger", version = "=1.10.0" }
solana-logger = { path = "../logger", version = "=1.10.0" }
solana-measure = { path = "../measure", version = "=1.10.0" }
solana-perf = { path = "../perf", version = "=1.10.0" }
solana-poh = { path = "../poh", version = "=1.10.0" }
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-streamer = { path = "../streamer", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-version = { path = "../version", version = "=1.10.0" }
solana-core = { path = "../core", version = "=1.10.2" }
solana-gossip = { path = "../gossip", version = "=1.10.2" }
solana-ledger = { path = "../ledger", version = "=1.10.2" }
solana-logger = { path = "../logger", version = "=1.10.2" }
solana-measure = { path = "../measure", version = "=1.10.2" }
solana-perf = { path = "../perf", version = "=1.10.2" }
solana-poh = { path = "../poh", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-streamer = { path = "../streamer", version = "=1.10.2" }
solana-version = { path = "../version", version = "=1.10.2" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -1,6 +1,6 @@
[package]
name = "solana-banks-client"
version = "1.10.0"
version = "1.10.2"
description = "Solana banks client"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
@@ -12,17 +12,17 @@ edition = "2021"
[dependencies]
borsh = "0.9.3"
futures = "0.3"
solana-banks-interface = { path = "../banks-interface", version = "=1.10.0" }
solana-program = { path = "../sdk/program", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-banks-interface = { path = "../banks-interface", version = "=1.10.2" }
solana-program = { path = "../sdk/program", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
tarpc = { version = "0.27.2", features = ["full"] }
thiserror = "1.0"
tokio = { version = "1", features = ["full"] }
tokio-serde = { version = "0.8", features = ["bincode"] }

[dev-dependencies]
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-banks-server = { path = "../banks-server", version = "=1.10.0" }
solana-banks-server = { path = "../banks-server", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }

[lib]
crate-type = ["lib"]
@@ -1,6 +1,6 @@
[package]
name = "solana-banks-interface"
version = "1.10.0"
version = "1.10.2"
description = "Solana banks RPC interface"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
@@ -11,7 +11,7 @@ edition = "2021"

[dependencies]
serde = { version = "1.0.136", features = ["derive"] }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
tarpc = { version = "0.27.2", features = ["full"] }

[lib]
@@ -1,6 +1,6 @@
[package]
name = "solana-banks-server"
version = "1.10.0"
version = "1.10.2"
description = "Solana banks server"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
@@ -13,10 +13,10 @@ edition = "2021"
bincode = "1.3.3"
crossbeam-channel = "0.5"
futures = "0.3"
solana-banks-interface = { path = "../banks-interface", version = "=1.10.0" }
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-send-transaction-service = { path = "../send-transaction-service", version = "=1.10.0" }
solana-banks-interface = { path = "../banks-interface", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-send-transaction-service = { path = "../send-transaction-service", version = "=1.10.2" }
tarpc = { version = "0.27.2", features = ["full"] }
tokio = { version = "1", features = ["full"] }
tokio-serde = { version = "0.8", features = ["bincode"] }
@@ -2,18 +2,18 @@
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-bench-streamer"
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
publish = false

[dependencies]
crossbeam-channel = "0.5"
clap = "2.33.1"
solana-streamer = { path = "../streamer", version = "=1.10.0" }
solana-net-utils = { path = "../net-utils", version = "=1.10.0" }
solana-version = { path = "../version", version = "=1.10.0" }
crossbeam-channel = "0.5"
solana-net-utils = { path = "../net-utils", version = "=1.10.2" }
solana-streamer = { path = "../streamer", version = "=1.10.2" }
solana-version = { path = "../version", version = "=1.10.2" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -2,7 +2,7 @@
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
edition = "2021"
name = "solana-bench-tps"
version = "1.10.0"
version = "1.10.2"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -13,25 +13,25 @@ clap = "2.33.1"
crossbeam-channel = "0.5"
log = "0.4.14"
rayon = "1.5.1"
serde_json = "1.0.78"
serde_json = "1.0.79"
serde_yaml = "0.8.23"
solana-core = { path = "../core", version = "=1.10.0" }
solana-genesis = { path = "../genesis", version = "=1.10.0" }
solana-client = { path = "../client", version = "=1.10.0" }
solana-faucet = { path = "../faucet", version = "=1.10.0" }
solana-gossip = { path = "../gossip", version = "=1.10.0" }
solana-logger = { path = "../logger", version = "=1.10.0" }
solana-metrics = { path = "../metrics", version = "=1.10.0" }
solana-measure = { path = "../measure", version = "=1.10.0" }
solana-net-utils = { path = "../net-utils", version = "=1.10.0" }
solana-runtime = { path = "../runtime", version = "=1.10.0" }
solana-sdk = { path = "../sdk", version = "=1.10.0" }
solana-streamer = { path = "../streamer", version = "=1.10.0" }
solana-version = { path = "../version", version = "=1.10.0" }
solana-client = { path = "../client", version = "=1.10.2" }
solana-core = { path = "../core", version = "=1.10.2" }
solana-faucet = { path = "../faucet", version = "=1.10.2" }
solana-genesis = { path = "../genesis", version = "=1.10.2" }
solana-gossip = { path = "../gossip", version = "=1.10.2" }
solana-logger = { path = "../logger", version = "=1.10.2" }
solana-measure = { path = "../measure", version = "=1.10.2" }
solana-metrics = { path = "../metrics", version = "=1.10.2" }
solana-net-utils = { path = "../net-utils", version = "=1.10.2" }
solana-runtime = { path = "../runtime", version = "=1.10.2" }
solana-sdk = { path = "../sdk", version = "=1.10.2" }
solana-streamer = { path = "../streamer", version = "=1.10.2" }
solana-version = { path = "../version", version = "=1.10.2" }

[dev-dependencies]
serial_test = "0.5.1"
solana-local-cluster = { path = "../local-cluster", version = "=1.10.0" }
serial_test = "0.6.0"
solana-local-cluster = { path = "../local-cluster", version = "=1.10.2" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -475,6 +475,7 @@ fn do_tx_transfers<T: Client>(
let tx_len = txs0.len();
let transfer_start = Instant::now();
let mut old_transactions = false;
let mut transactions = Vec::<_>::new();
for tx in txs0 {
let now = timestamp();
// Transactions that are too old will be rejected by the cluster Don't bother
@@ -483,10 +484,13 @@ fn do_tx_transfers<T: Client>(
old_transactions = true;
continue;
}
client
.async_send_transaction(tx.0)
.expect("async_send_transaction in do_tx_transfers");
transactions.push(tx.0);
}

if let Err(error) = client.async_send_batch(transactions) {
warn!("send_batch_sync in do_tx_transfers failed: {}", error);
}

if old_transactions {
let mut shared_txs_wl = shared_txs.write().expect("write lock in do_tx_transfers");
shared_txs_wl.clear();
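The hunk above stops calling async_send_transaction once per transaction and instead collects the still-fresh transactions and submits them with a single async_send_batch call. A minimal sketch of that collect-then-batch shape over a stand-in client trait (the trait below is illustrative, not the real solana-client API):

    // Stand-in for the benchmark's Client; only the batch entry point matters here.
    trait BatchClient {
        fn async_send_batch(&self, txs: Vec<String>) -> Result<(), String>;
    }

    struct LoggingClient;

    impl BatchClient for LoggingClient {
        fn async_send_batch(&self, txs: Vec<String>) -> Result<(), String> {
            println!("submitting {} transactions in one call", txs.len());
            Ok(())
        }
    }

    fn send_all(client: &impl BatchClient, txs: Vec<(String, u64)>, now: u64, max_age: u64) {
        // Filter stale transactions while collecting, then hand the whole batch
        // to the client instead of issuing one send per transaction.
        let mut transactions = Vec::new();
        for (tx, created_at) in txs {
            if now.saturating_sub(created_at) > max_age {
                continue; // too old; the cluster would reject it anyway
            }
            transactions.push(tx);
        }
        if let Err(error) = client.async_send_batch(transactions) {
            eprintln!("batch send failed: {}", error);
        }
    }

    fn main() {
        send_all(&LoggingClient, vec![("tx1".into(), 90), ("tx2".into(), 10)], 100, 30);
    }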
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-bloom"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "Solana bloom filter"
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
@@ -12,14 +12,14 @@ edition = "2021"
|
||||
[dependencies]
|
||||
bv = { version = "0.11.1", features = ["serde"] }
|
||||
fnv = "1.0.7"
|
||||
rand = "0.7.0"
|
||||
serde = { version = "1.0.136", features = ["rc"] }
|
||||
rayon = "1.5.1"
|
||||
serde_derive = "1.0.103"
|
||||
solana-frozen-abi = { path = "../frozen-abi", version = "=1.10.0" }
|
||||
solana-frozen-abi-macro = { path = "../frozen-abi/macro", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
log = "0.4.14"
|
||||
rand = "0.7.0"
|
||||
rayon = "1.5.1"
|
||||
serde = { version = "1.0.136", features = ["rc"] }
|
||||
serde_derive = "1.0.103"
|
||||
solana-frozen-abi = { path = "../frozen-abi", version = "=1.10.2" }
|
||||
solana-frozen-abi-macro = { path = "../frozen-abi/macro", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
|
||||
[lib]
|
||||
crate-type = ["lib"]
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-bucket-map"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "solana-bucket-map"
|
||||
homepage = "https://solana.com/"
|
||||
documentation = "https://docs.rs/solana-bucket-map"
|
||||
@@ -11,18 +11,18 @@ license = "Apache-2.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
memmap2 = "0.5.2"
|
||||
log = { version = "0.4.11" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.0" }
|
||||
rand = "0.7.0"
|
||||
tempfile = "3.3.0"
|
||||
memmap2 = "0.5.3"
|
||||
modular-bitfield = "0.11.2"
|
||||
rand = "0.7.0"
|
||||
solana-measure = { path = "../measure", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
tempfile = "3.3.0"
|
||||
|
||||
[dev-dependencies]
|
||||
fs_extra = "1.2.0"
|
||||
rayon = "1.5.0"
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
|
||||
[lib]
|
||||
crate-type = ["lib"]
|
||||
|
@@ -102,6 +102,8 @@ command_step() {
|
||||
command: "$2"
|
||||
timeout_in_minutes: $3
|
||||
artifact_paths: "log-*.txt"
|
||||
agents:
|
||||
- "queue=solana"
|
||||
EOF
|
||||
}
|
||||
|
||||
@@ -168,7 +170,7 @@ all_test_steps() {
|
||||
timeout_in_minutes: 20
|
||||
artifact_paths: "bpf-dumps.tar.bz2"
|
||||
agents:
|
||||
- "queue=default"
|
||||
- "queue=solana"
|
||||
EOF
|
||||
else
|
||||
annotate --style info \
|
||||
@@ -221,6 +223,8 @@ EOF
|
||||
- command: "scripts/build-downstream-projects.sh"
|
||||
name: "downstream-projects"
|
||||
timeout_in_minutes: 30
|
||||
agents:
|
||||
- "queue=solana"
|
||||
EOF
|
||||
else
|
||||
annotate --style info \
|
||||
@@ -246,6 +250,8 @@ EOF
|
||||
- command: "scripts/build-downstream-anchor-projects.sh"
|
||||
name: "downstream-anchor-projects"
|
||||
timeout_in_minutes: 10
|
||||
agents:
|
||||
- "queue=solana"
|
||||
EOF
|
||||
else
|
||||
annotate --style info \
|
||||
|
@@ -8,11 +8,6 @@ src_root="$(readlink -f "${here}/..")"
|
||||
cd "${src_root}"
|
||||
|
||||
cargo_audit_ignores=(
|
||||
# failure is officially deprecated/unmaintained
|
||||
#
|
||||
# Blocked on multiple upstream crates removing their `failure` dependency.
|
||||
--ignore RUSTSEC-2020-0036
|
||||
|
||||
# `net2` crate has been deprecated; use `socket2` instead
|
||||
#
|
||||
# Blocked on https://github.com/paritytech/jsonrpc/issues/575
|
||||
@@ -30,22 +25,10 @@ cargo_audit_ignores=(
|
||||
|
||||
# generic-array: arr! macro erases lifetimes
|
||||
#
|
||||
# Blocked on libsecp256k1 releasing with upgraded dependencies
|
||||
# https://github.com/paritytech/libsecp256k1/issues/66
|
||||
# Blocked on new spl dependencies on solana-program v1.9
|
||||
# due to curve25519-dalek dependency
|
||||
--ignore RUSTSEC-2020-0146
|
||||
|
||||
# hyper: Lenient `hyper` header parsing of `Content-Length` could allow request smuggling
|
||||
#
|
||||
# Blocked on jsonrpc removing dependency on unmaintained `websocket`
|
||||
# https://github.com/paritytech/jsonrpc/issues/605
|
||||
--ignore RUSTSEC-2021-0078
|
||||
|
||||
# hyper: Integer overflow in `hyper`'s parsing of the `Transfer-Encoding` header leads to data loss
|
||||
#
|
||||
# Blocked on jsonrpc removing dependency on unmaintained `websocket`
|
||||
# https://github.com/paritytech/jsonrpc/issues/605
|
||||
--ignore RUSTSEC-2021-0079
|
||||
|
||||
# chrono: Potential segfault in `localtime_r` invocations
|
||||
#
|
||||
# Blocked due to no safe upgrade
|
||||
|
@@ -57,26 +57,20 @@ if [[ $CI_BASE_BRANCH = "$EDGE_CHANNEL" ]]; then
|
||||
exit "$check_status"
|
||||
fi
|
||||
|
||||
# Ensure nightly and --benches
|
||||
# Ensure nightly and --benches
|
||||
_ scripts/cargo-for-all-lock-files.sh nightly check --locked --all-targets
|
||||
else
|
||||
echo "Note: cargo-for-all-lock-files.sh skipped because $CI_BASE_BRANCH != $EDGE_CHANNEL"
|
||||
fi
|
||||
|
||||
_ ci/order-crates-for-publishing.py
|
||||
_ ci/order-crates-for-publishing.py
|
||||
|
||||
# -Z... is needed because of clippy bug: https://github.com/rust-lang/rust-clippy/issues/4612
|
||||
# run nightly clippy for `sdk/` as there's a moderate amount of nightly-only code there
|
||||
_ "$cargo" nightly clippy -Zunstable-options --workspace --all-targets -- --deny=warnings --deny=clippy::integer_arithmetic
|
||||
_ scripts/cargo-for-all-lock-files.sh -- nightly clippy -Zunstable-options --all-targets -- --deny=warnings --deny=clippy::integer_arithmetic
|
||||
|
||||
_ "$cargo" nightly fmt --all -- --check
|
||||
_ scripts/cargo-for-all-lock-files.sh -- nightly fmt --all -- --check
|
||||
|
||||
_ ci/do-audit.sh
|
||||
|
||||
{
|
||||
cd programs/bpf
|
||||
_ "$cargo" nightly clippy --all -- --deny=warnings --allow=clippy::missing_safety_doc
|
||||
_ "$cargo" nightly fmt --all -- --check
|
||||
}
|
||||
_ ci/do-audit.sh
|
||||
|
||||
echo --- ok
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-clap-utils"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "Solana utilities for the clap"
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
@@ -10,16 +10,16 @@ documentation = "https://docs.rs/solana-clap-utils"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
chrono = "0.4"
|
||||
clap = "2.33.0"
|
||||
rpassword = "5.0"
|
||||
solana-perf = { path = "../perf", version = "=1.10.0" }
|
||||
solana-remote-wallet = { path = "../remote-wallet", version = "=1.10.0", default-features = false}
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.2" }
|
||||
solana-remote-wallet = { path = "../remote-wallet", version = "=1.10.2", default-features = false }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
thiserror = "1.0.30"
|
||||
tiny-bip39 = "0.8.2"
|
||||
uriparse = "0.6.3"
|
||||
url = "2.2.2"
|
||||
chrono = "0.4"
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3.0"
|
||||
|
@@ -3,7 +3,7 @@ authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
edition = "2021"
|
||||
name = "solana-cli-config"
|
||||
description = "Blockchain, Rebuilt for Scale"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
license = "Apache-2.0"
|
||||
homepage = "https://solana.com/"
|
||||
@@ -18,7 +18,7 @@ serde_yaml = "0.8.23"
|
||||
url = "2.2.2"
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = "1.0.53"
|
||||
anyhow = "1.0.56"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
@@ -3,29 +3,32 @@ authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
edition = "2021"
|
||||
name = "solana-cli-output"
|
||||
description = "Blockchain, Rebuilt for Scale"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
license = "Apache-2.0"
|
||||
homepage = "https://solana.com/"
|
||||
documentation = "https://docs.rs/solana-cli-output"
|
||||
|
||||
[dependencies]
|
||||
Inflector = "0.11.4"
|
||||
base64 = "0.13.0"
|
||||
chrono = { version = "0.4.11", features = ["serde"] }
|
||||
clap = "2.33.0"
|
||||
console = "0.15.0"
|
||||
humantime = "2.0.1"
|
||||
Inflector = "0.11.4"
|
||||
indicatif = "0.16.2"
|
||||
serde = "1.0.136"
|
||||
serde_json = "1.0.78"
|
||||
solana-account-decoder = { path = "../account-decoder", version = "=1.10.0" }
|
||||
solana-clap-utils = { path = "../clap-utils", version = "=1.10.0" }
|
||||
solana-client = { path = "../client", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.0" }
|
||||
serde_json = "1.0.79"
|
||||
solana-account-decoder = { path = "../account-decoder", version = "=1.10.2" }
|
||||
solana-clap-utils = { path = "../clap-utils", version = "=1.10.2" }
|
||||
solana-client = { path = "../client", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.2" }
|
||||
spl-memo = { version = "=3.0.1", features = ["no-entrypoint"] }
|
||||
|
||||
[dev-dependencies]
|
||||
ed25519-dalek = "=1.0.1"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
@@ -2335,7 +2335,7 @@ impl fmt::Display for CliBlock {
writeln_transaction(
f,
&transaction_with_meta.transaction.decode().unwrap(),
&transaction_with_meta.meta,
transaction_with_meta.meta.as_ref(),
" ",
None,
None,
@@ -2369,7 +2369,7 @@ impl fmt::Display for CliTransaction {
writeln_transaction(
f,
&self.decoded_transaction,
&self.meta,
self.meta.as_ref(),
&self.prefix,
if !self.sigverify_status.is_empty() {
Some(&self.sigverify_status)
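Both call sites in this hunk switch from passing &Option<UiTransactionStatusMeta> to Option<&UiTransactionStatusMeta> via Option::as_ref, matching the reworked writeln_transaction signature further down in the diff. A minimal sketch of that conversion with a stand-in metadata type:

    #[derive(Debug)]
    struct DemoMeta {
        fee: u64,
    }

    // New-style helper: it borrows the metadata only when it is present.
    fn describe(meta: Option<&DemoMeta>) -> String {
        match meta {
            Some(m) => format!("fee: {} lamports", m.fee),
            None => "status unavailable".to_string(),
        }
    }

    fn main() {
        let maybe_meta: Option<DemoMeta> = Some(DemoMeta { fee: 5000 });
        // as_ref() turns Option<DemoMeta> into Option<&DemoMeta> without
        // moving or cloning the value, so maybe_meta stays usable below.
        println!("{}", describe(maybe_meta.as_ref()));
        println!("{:?}", maybe_meta);
        println!("{}", describe(None));
    }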
@@ -4,10 +4,17 @@ use {
|
||||
console::style,
|
||||
indicatif::{ProgressBar, ProgressStyle},
|
||||
solana_sdk::{
|
||||
clock::UnixTimestamp, hash::Hash, message::Message, native_token::lamports_to_sol,
|
||||
program_utils::limited_deserialize, pubkey::Pubkey, stake, transaction::Transaction,
|
||||
clock::UnixTimestamp,
|
||||
hash::Hash,
|
||||
instruction::CompiledInstruction,
|
||||
native_token::lamports_to_sol,
|
||||
program_utils::limited_deserialize,
|
||||
pubkey::Pubkey,
|
||||
signature::Signature,
|
||||
stake,
|
||||
transaction::{Transaction, TransactionError},
|
||||
},
|
||||
solana_transaction_status::UiTransactionStatusMeta,
|
||||
solana_transaction_status::{Rewards, UiTransactionStatusMeta},
|
||||
spl_memo::{id as spl_memo_id, v1::id as spl_memo_v1_id},
|
||||
std::{collections::HashMap, fmt, io},
|
||||
};
|
||||
@@ -131,22 +138,28 @@ pub fn println_signers(
|
||||
println!();
|
||||
}
|
||||
|
||||
fn format_account_mode(message: &Message, index: usize) -> String {
|
||||
struct CliAccountMeta {
|
||||
is_signer: bool,
|
||||
is_writable: bool,
|
||||
is_invoked: bool,
|
||||
}
|
||||
|
||||
fn format_account_mode(meta: CliAccountMeta) -> String {
|
||||
format!(
|
||||
"{}r{}{}", // accounts are always readable...
|
||||
if message.is_signer(index) {
|
||||
if meta.is_signer {
|
||||
"s" // stands for signer
|
||||
} else {
|
||||
"-"
|
||||
},
|
||||
if message.is_writable(index) {
|
||||
if meta.is_writable {
|
||||
"w" // comment for consistent rust fmt (no joking; lol)
|
||||
} else {
|
||||
"-"
|
||||
},
|
||||
// account may be executable on-chain while not being
|
||||
// designated as a program-id in the message
|
||||
if message.maybe_executable(index) {
|
||||
if meta.is_invoked {
|
||||
"x"
|
||||
} else {
|
||||
// programs to be executed via CPI cannot be identified as
|
||||
@@ -156,202 +169,66 @@ fn format_account_mode(message: &Message, index: usize) -> String {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn write_transaction<W: io::Write>(
|
||||
fn write_transaction<W: io::Write>(
|
||||
w: &mut W,
|
||||
transaction: &Transaction,
|
||||
transaction_status: &Option<UiTransactionStatusMeta>,
|
||||
transaction_status: Option<&UiTransactionStatusMeta>,
|
||||
prefix: &str,
|
||||
sigverify_status: Option<&[CliSignatureVerificationStatus]>,
|
||||
block_time: Option<UnixTimestamp>,
|
||||
timezone: CliTimezone,
|
||||
) -> io::Result<()> {
|
||||
write_block_time(w, block_time, timezone, prefix)?;
|
||||
|
||||
let message = &transaction.message;
|
||||
if let Some(block_time) = block_time {
|
||||
writeln!(
|
||||
w,
|
||||
"{}Block Time: {:?}",
|
||||
prefix,
|
||||
Local.timestamp(block_time, 0)
|
||||
)?;
|
||||
}
|
||||
writeln!(
|
||||
w,
|
||||
"{}Recent Blockhash: {:?}",
|
||||
prefix, message.recent_blockhash
|
||||
)?;
|
||||
let sigverify_statuses = if let Some(sigverify_status) = sigverify_status {
|
||||
sigverify_status
|
||||
.iter()
|
||||
.map(|s| format!(" ({})", s))
|
||||
.collect()
|
||||
} else {
|
||||
vec!["".to_string(); transaction.signatures.len()]
|
||||
};
|
||||
for (signature_index, (signature, sigverify_status)) in transaction
|
||||
.signatures
|
||||
.iter()
|
||||
.zip(&sigverify_statuses)
|
||||
.enumerate()
|
||||
{
|
||||
writeln!(
|
||||
w,
|
||||
"{}Signature {}: {:?}{}",
|
||||
prefix, signature_index, signature, sigverify_status,
|
||||
)?;
|
||||
}
|
||||
write_recent_blockhash(w, &message.recent_blockhash, prefix)?;
|
||||
write_signatures(w, &transaction.signatures, sigverify_status, prefix)?;
|
||||
|
||||
let mut fee_payer_index = None;
|
||||
for (account_index, account) in message.account_keys.iter().enumerate() {
|
||||
if fee_payer_index.is_none() && message.is_non_loader_key(account_index) {
|
||||
fee_payer_index = Some(account_index)
|
||||
}
|
||||
writeln!(
|
||||
|
||||
let account_meta = CliAccountMeta {
|
||||
is_signer: message.is_signer(account_index),
|
||||
is_writable: message.is_writable(account_index),
|
||||
is_invoked: message.maybe_executable(account_index),
|
||||
};
|
||||
|
||||
write_account(
|
||||
w,
|
||||
"{}Account {}: {} {}{}",
|
||||
prefix,
|
||||
account_index,
|
||||
format_account_mode(message, account_index),
|
||||
account,
|
||||
if Some(account_index) == fee_payer_index {
|
||||
" (fee payer)"
|
||||
} else {
|
||||
""
|
||||
},
|
||||
format_account_mode(account_meta),
|
||||
Some(account_index) == fee_payer_index,
|
||||
prefix,
|
||||
)?;
|
||||
}
|
||||
|
||||
for (instruction_index, instruction) in message.instructions.iter().enumerate() {
|
||||
let program_pubkey = message.account_keys[instruction.program_id_index as usize];
|
||||
writeln!(w, "{}Instruction {}", prefix, instruction_index)?;
|
||||
writeln!(
|
||||
let instruction_accounts = instruction.accounts.iter().map(|account_index| {
|
||||
let account_pubkey = &message.account_keys[*account_index as usize];
|
||||
(account_pubkey, *account_index)
|
||||
});
|
||||
|
||||
write_instruction(
|
||||
w,
|
||||
"{} Program: {} ({})",
|
||||
prefix, program_pubkey, instruction.program_id_index
|
||||
instruction_index,
|
||||
&program_pubkey,
|
||||
instruction,
|
||||
instruction_accounts,
|
||||
prefix,
|
||||
)?;
|
||||
for (account_index, account) in instruction.accounts.iter().enumerate() {
|
||||
let account_pubkey = message.account_keys[*account as usize];
|
||||
writeln!(
|
||||
w,
|
||||
"{} Account {}: {} ({})",
|
||||
prefix, account_index, account_pubkey, account
|
||||
)?;
|
||||
}
|
||||
|
||||
let mut raw = true;
|
||||
if program_pubkey == solana_vote_program::id() {
|
||||
if let Ok(vote_instruction) = limited_deserialize::<
|
||||
solana_vote_program::vote_instruction::VoteInstruction,
|
||||
>(&instruction.data)
|
||||
{
|
||||
writeln!(w, "{} {:?}", prefix, vote_instruction)?;
|
||||
raw = false;
|
||||
}
|
||||
} else if program_pubkey == stake::program::id() {
|
||||
if let Ok(stake_instruction) =
|
||||
limited_deserialize::<stake::instruction::StakeInstruction>(&instruction.data)
|
||||
{
|
||||
writeln!(w, "{} {:?}", prefix, stake_instruction)?;
|
||||
raw = false;
|
||||
}
|
||||
} else if program_pubkey == solana_sdk::system_program::id() {
|
||||
if let Ok(system_instruction) = limited_deserialize::<
|
||||
solana_sdk::system_instruction::SystemInstruction,
|
||||
>(&instruction.data)
|
||||
{
|
||||
writeln!(w, "{} {:?}", prefix, system_instruction)?;
|
||||
raw = false;
|
||||
}
|
||||
} else if is_memo_program(&program_pubkey) {
|
||||
if let Ok(s) = std::str::from_utf8(&instruction.data) {
|
||||
writeln!(w, "{} Data: \"{}\"", prefix, s)?;
|
||||
raw = false;
|
||||
}
|
||||
}
|
||||
|
||||
if raw {
|
||||
writeln!(w, "{} Data: {:?}", prefix, instruction.data)?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(transaction_status) = transaction_status {
|
||||
writeln!(
|
||||
w,
|
||||
"{}Status: {}",
|
||||
prefix,
|
||||
match &transaction_status.status {
|
||||
Ok(_) => "Ok".into(),
|
||||
Err(err) => err.to_string(),
|
||||
}
|
||||
)?;
|
||||
writeln!(
|
||||
w,
|
||||
"{} Fee: ◎{}",
|
||||
prefix,
|
||||
lamports_to_sol(transaction_status.fee)
|
||||
)?;
|
||||
assert_eq!(
|
||||
transaction_status.pre_balances.len(),
|
||||
transaction_status.post_balances.len()
|
||||
);
|
||||
for (i, (pre, post)) in transaction_status
|
||||
.pre_balances
|
||||
.iter()
|
||||
.zip(transaction_status.post_balances.iter())
|
||||
.enumerate()
|
||||
{
|
||||
if pre == post {
|
||||
writeln!(
|
||||
w,
|
||||
"{} Account {} balance: ◎{}",
|
||||
prefix,
|
||||
i,
|
||||
lamports_to_sol(*pre)
|
||||
)?;
|
||||
} else {
|
||||
writeln!(
|
||||
w,
|
||||
"{} Account {} balance: ◎{} -> ◎{}",
|
||||
prefix,
|
||||
i,
|
||||
lamports_to_sol(*pre),
|
||||
lamports_to_sol(*post)
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(log_messages) = &transaction_status.log_messages {
|
||||
if !log_messages.is_empty() {
|
||||
writeln!(w, "{}Log Messages:", prefix,)?;
|
||||
for log_message in log_messages {
|
||||
writeln!(w, "{} {}", prefix, log_message)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(rewards) = &transaction_status.rewards {
|
||||
if !rewards.is_empty() {
|
||||
writeln!(w, "{}Rewards:", prefix,)?;
|
||||
writeln!(
|
||||
w,
|
||||
"{} {:<44} {:^15} {:<15} {:<20}",
|
||||
prefix, "Address", "Type", "Amount", "New Balance"
|
||||
)?;
|
||||
for reward in rewards {
|
||||
let sign = if reward.lamports < 0 { "-" } else { "" };
|
||||
writeln!(
|
||||
w,
|
||||
"{} {:<44} {:^15} {}◎{:<14.9} ◎{:<18.9}",
|
||||
prefix,
|
||||
reward.pubkey,
|
||||
if let Some(reward_type) = reward.reward_type {
|
||||
format!("{}", reward_type)
|
||||
} else {
|
||||
"-".to_string()
|
||||
},
|
||||
sign,
|
||||
lamports_to_sol(reward.lamports.abs() as u64),
|
||||
lamports_to_sol(reward.post_balance)
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
write_status(w, &transaction_status.status, prefix)?;
|
||||
write_fees(w, transaction_status.fee, prefix)?;
|
||||
write_balances(w, transaction_status, prefix)?;
|
||||
write_log_messages(w, transaction_status.log_messages.as_ref(), prefix)?;
|
||||
write_rewards(w, transaction_status.rewards.as_ref(), prefix)?;
|
||||
} else {
|
||||
writeln!(w, "{}Status: Unavailable", prefix)?;
|
||||
}
|
||||
@@ -359,9 +236,252 @@ pub fn write_transaction<W: io::Write>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
enum CliTimezone {
|
||||
Local,
|
||||
#[allow(dead_code)]
|
||||
Utc,
|
||||
}
|
||||
|
||||
fn write_block_time<W: io::Write>(
|
||||
w: &mut W,
|
||||
block_time: Option<UnixTimestamp>,
|
||||
timezone: CliTimezone,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
if let Some(block_time) = block_time {
|
||||
let block_time_output = match timezone {
|
||||
CliTimezone::Local => format!("{:?}", Local.timestamp(block_time, 0)),
|
||||
CliTimezone::Utc => format!("{:?}", Utc.timestamp(block_time, 0)),
|
||||
};
|
||||
writeln!(w, "{}Block Time: {}", prefix, block_time_output,)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_recent_blockhash<W: io::Write>(
|
||||
w: &mut W,
|
||||
recent_blockhash: &Hash,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
writeln!(w, "{}Recent Blockhash: {:?}", prefix, recent_blockhash)
|
||||
}
|
||||
|
||||
fn write_signatures<W: io::Write>(
|
||||
w: &mut W,
|
||||
signatures: &[Signature],
|
||||
sigverify_status: Option<&[CliSignatureVerificationStatus]>,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
let sigverify_statuses = if let Some(sigverify_status) = sigverify_status {
|
||||
sigverify_status
|
||||
.iter()
|
||||
.map(|s| format!(" ({})", s))
|
||||
.collect()
|
||||
} else {
|
||||
vec!["".to_string(); signatures.len()]
|
||||
};
|
||||
for (signature_index, (signature, sigverify_status)) in
|
||||
signatures.iter().zip(&sigverify_statuses).enumerate()
|
||||
{
|
||||
writeln!(
|
||||
w,
|
||||
"{}Signature {}: {:?}{}",
|
||||
prefix, signature_index, signature, sigverify_status,
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_account<W: io::Write>(
|
||||
w: &mut W,
|
||||
account_index: usize,
|
||||
account_address: &Pubkey,
|
||||
account_mode: String,
|
||||
is_fee_payer: bool,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
writeln!(
|
||||
w,
|
||||
"{}Account {}: {} {}{}",
|
||||
prefix,
|
||||
account_index,
|
||||
account_mode,
|
||||
account_address,
|
||||
if is_fee_payer { " (fee payer)" } else { "" },
|
||||
)
|
||||
}
|
||||
|
||||
fn write_instruction<'a, W: io::Write>(
|
||||
w: &mut W,
|
||||
instruction_index: usize,
|
||||
program_pubkey: &Pubkey,
|
||||
instruction: &CompiledInstruction,
|
||||
instruction_accounts: impl Iterator<Item = (&'a Pubkey, u8)>,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
writeln!(w, "{}Instruction {}", prefix, instruction_index)?;
|
||||
writeln!(
|
||||
w,
|
||||
"{} Program: {} ({})",
|
||||
prefix, program_pubkey, instruction.program_id_index
|
||||
)?;
|
||||
for (index, (account_address, account_index)) in instruction_accounts.enumerate() {
|
||||
writeln!(
|
||||
w,
|
||||
"{} Account {}: {} ({})",
|
||||
prefix, index, account_address, account_index
|
||||
)?;
|
||||
}
|
||||
|
||||
let mut raw = true;
|
||||
if program_pubkey == &solana_vote_program::id() {
|
||||
if let Ok(vote_instruction) = limited_deserialize::<
|
||||
solana_vote_program::vote_instruction::VoteInstruction,
|
||||
>(&instruction.data)
|
||||
{
|
||||
writeln!(w, "{} {:?}", prefix, vote_instruction)?;
|
||||
raw = false;
|
||||
}
|
||||
} else if program_pubkey == &stake::program::id() {
|
||||
if let Ok(stake_instruction) =
|
||||
limited_deserialize::<stake::instruction::StakeInstruction>(&instruction.data)
|
||||
{
|
||||
writeln!(w, "{} {:?}", prefix, stake_instruction)?;
|
||||
raw = false;
|
||||
}
|
||||
} else if program_pubkey == &solana_sdk::system_program::id() {
|
||||
if let Ok(system_instruction) = limited_deserialize::<
|
||||
solana_sdk::system_instruction::SystemInstruction,
|
||||
>(&instruction.data)
|
||||
{
|
||||
writeln!(w, "{} {:?}", prefix, system_instruction)?;
|
||||
raw = false;
|
||||
}
|
||||
} else if is_memo_program(program_pubkey) {
|
||||
if let Ok(s) = std::str::from_utf8(&instruction.data) {
|
||||
writeln!(w, "{} Data: \"{}\"", prefix, s)?;
|
||||
raw = false;
|
||||
}
|
||||
}
|
||||
|
||||
if raw {
|
||||
writeln!(w, "{} Data: {:?}", prefix, instruction.data)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_rewards<W: io::Write>(
|
||||
w: &mut W,
|
||||
rewards: Option<&Rewards>,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
if let Some(rewards) = rewards {
|
||||
if !rewards.is_empty() {
|
||||
writeln!(w, "{}Rewards:", prefix,)?;
|
||||
writeln!(
|
||||
w,
|
||||
"{} {:<44} {:^15} {:<16} {:<20}",
|
||||
prefix, "Address", "Type", "Amount", "New Balance"
|
||||
)?;
|
||||
for reward in rewards {
|
||||
let sign = if reward.lamports < 0 { "-" } else { "" };
|
||||
writeln!(
|
||||
w,
|
||||
"{} {:<44} {:^15} {}◎{:<14.9} ◎{:<18.9}",
|
||||
prefix,
|
||||
reward.pubkey,
|
||||
if let Some(reward_type) = reward.reward_type {
|
||||
format!("{}", reward_type)
|
||||
} else {
|
||||
"-".to_string()
|
||||
},
|
||||
sign,
|
||||
lamports_to_sol(reward.lamports.abs() as u64),
|
||||
lamports_to_sol(reward.post_balance)
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_status<W: io::Write>(
|
||||
w: &mut W,
|
||||
transaction_status: &Result<(), TransactionError>,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
writeln!(
|
||||
w,
|
||||
"{}Status: {}",
|
||||
prefix,
|
||||
match transaction_status {
|
||||
Ok(_) => "Ok".into(),
|
||||
Err(err) => err.to_string(),
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn write_fees<W: io::Write>(w: &mut W, transaction_fee: u64, prefix: &str) -> io::Result<()> {
|
||||
writeln!(w, "{} Fee: ◎{}", prefix, lamports_to_sol(transaction_fee))
|
||||
}
|
||||
|
||||
fn write_balances<W: io::Write>(
|
||||
w: &mut W,
|
||||
transaction_status: &UiTransactionStatusMeta,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
assert_eq!(
|
||||
transaction_status.pre_balances.len(),
|
||||
transaction_status.post_balances.len()
|
||||
);
|
||||
for (i, (pre, post)) in transaction_status
|
||||
.pre_balances
|
||||
.iter()
|
||||
.zip(transaction_status.post_balances.iter())
|
||||
.enumerate()
|
||||
{
|
||||
if pre == post {
|
||||
writeln!(
|
||||
w,
|
||||
"{} Account {} balance: ◎{}",
|
||||
prefix,
|
||||
i,
|
||||
lamports_to_sol(*pre)
|
||||
)?;
|
||||
} else {
|
||||
writeln!(
|
||||
w,
|
||||
"{} Account {} balance: ◎{} -> ◎{}",
|
||||
prefix,
|
||||
i,
|
||||
lamports_to_sol(*pre),
|
||||
lamports_to_sol(*post)
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_log_messages<W: io::Write>(
|
||||
w: &mut W,
|
||||
log_messages: Option<&Vec<String>>,
|
||||
prefix: &str,
|
||||
) -> io::Result<()> {
|
||||
if let Some(log_messages) = log_messages {
|
||||
if !log_messages.is_empty() {
|
||||
writeln!(w, "{}Log Messages:", prefix,)?;
|
||||
for log_message in log_messages {
|
||||
writeln!(w, "{} {}", prefix, log_message)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn println_transaction(
|
||||
transaction: &Transaction,
|
||||
transaction_status: &Option<UiTransactionStatusMeta>,
|
||||
transaction_status: Option<&UiTransactionStatusMeta>,
|
||||
prefix: &str,
|
||||
sigverify_status: Option<&[CliSignatureVerificationStatus]>,
|
||||
block_time: Option<UnixTimestamp>,
|
||||
@@ -374,6 +494,7 @@ pub fn println_transaction(
|
||||
prefix,
|
||||
sigverify_status,
|
||||
block_time,
|
||||
CliTimezone::Local,
|
||||
)
|
||||
.is_ok()
|
||||
{
|
||||
@@ -386,22 +507,23 @@ pub fn println_transaction(
|
||||
pub fn writeln_transaction(
|
||||
f: &mut dyn fmt::Write,
|
||||
transaction: &Transaction,
|
||||
transaction_status: &Option<UiTransactionStatusMeta>,
|
||||
transaction_status: Option<&UiTransactionStatusMeta>,
|
||||
prefix: &str,
|
||||
sigverify_status: Option<&[CliSignatureVerificationStatus]>,
|
||||
block_time: Option<UnixTimestamp>,
|
||||
) -> fmt::Result {
|
||||
let mut w = Vec::new();
|
||||
if write_transaction(
|
||||
let write_result = write_transaction(
|
||||
&mut w,
|
||||
transaction,
|
||||
transaction_status,
|
||||
prefix,
|
||||
sigverify_status,
|
||||
block_time,
|
||||
)
|
||||
.is_ok()
|
||||
{
|
||||
CliTimezone::Local,
|
||||
);
|
||||
|
||||
if write_result.is_ok() {
|
||||
if let Ok(s) = String::from_utf8(w) {
|
||||
write!(f, "{}", s)?;
|
||||
}
|
||||
@@ -427,7 +549,102 @@ pub fn unix_timestamp_to_string(unix_timestamp: UnixTimestamp) -> String {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use {super::*, solana_sdk::pubkey::Pubkey};
|
||||
use {
|
||||
super::*,
|
||||
solana_sdk::{
|
||||
message::{v0::LoadedAddresses, Message as LegacyMessage, MessageHeader},
|
||||
pubkey::Pubkey,
|
||||
signature::{Keypair, Signer},
|
||||
},
|
||||
solana_transaction_status::{Reward, RewardType, TransactionStatusMeta},
|
||||
std::io::BufWriter,
|
||||
};
|
||||
|
||||
fn test_keypair() -> Keypair {
|
||||
let secret = ed25519_dalek::SecretKey::from_bytes(&[0u8; 32]).unwrap();
|
||||
let public = ed25519_dalek::PublicKey::from(&secret);
|
||||
let keypair = ed25519_dalek::Keypair { secret, public };
|
||||
Keypair::from_bytes(&keypair.to_bytes()).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_transaction() {
|
||||
let keypair = test_keypair();
|
||||
let account_key = Pubkey::new_from_array([1u8; 32]);
|
||||
let transaction = Transaction::new(
|
||||
&[&keypair],
|
||||
LegacyMessage {
|
||||
header: MessageHeader {
|
||||
num_required_signatures: 1,
|
||||
num_readonly_signed_accounts: 0,
|
||||
num_readonly_unsigned_accounts: 1,
|
||||
},
|
||||
recent_blockhash: Hash::default(),
|
||||
account_keys: vec![keypair.pubkey(), account_key],
|
||||
instructions: vec![CompiledInstruction::new_from_raw_parts(1, vec![], vec![0])],
|
||||
},
|
||||
Hash::default(),
|
||||
);
|
||||
|
||||
let sigverify_status = CliSignatureVerificationStatus::verify_transaction(&transaction);
|
||||
let meta = TransactionStatusMeta {
|
||||
status: Ok(()),
|
||||
fee: 5000,
|
||||
pre_balances: vec![5000, 10_000],
|
||||
post_balances: vec![0, 9_900],
|
||||
inner_instructions: None,
|
||||
log_messages: Some(vec!["Test message".to_string()]),
|
||||
pre_token_balances: None,
|
||||
post_token_balances: None,
|
||||
rewards: Some(vec![Reward {
|
||||
pubkey: account_key.to_string(),
|
||||
lamports: -100,
|
||||
post_balance: 9_900,
|
||||
reward_type: Some(RewardType::Rent),
|
||||
commission: None,
|
||||
}]),
|
||||
loaded_addresses: LoadedAddresses::default(),
|
||||
};
|
||||
|
||||
let output = {
|
||||
let mut write_buffer = BufWriter::new(Vec::new());
|
||||
write_transaction(
|
||||
&mut write_buffer,
|
||||
&transaction,
|
||||
Some(&meta.into()),
|
||||
"",
|
||||
Some(&sigverify_status),
|
||||
Some(1628633791),
|
||||
CliTimezone::Utc,
|
||||
)
|
||||
.unwrap();
|
||||
let bytes = write_buffer.into_inner().unwrap();
|
||||
String::from_utf8(bytes).unwrap()
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
output,
|
||||
r#"Block Time: 2021-08-10T22:16:31Z
|
||||
Recent Blockhash: 11111111111111111111111111111111
|
||||
Signature 0: 5pkjrE4VBa3Bu9CMKXgh1U345cT1gGo8QBVRTzHAo6gHeiPae5BTbShP15g6NgqRMNqu8Qrhph1ATmrfC1Ley3rx (pass)
|
||||
Account 0: srw- 4zvwRjXUKGfvwnParsHAS3HuSVzV5cA4McphgmoCtajS (fee payer)
|
||||
Account 1: -r-x 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi
|
||||
Instruction 0
|
||||
Program: 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi (1)
|
||||
Account 0: 4zvwRjXUKGfvwnParsHAS3HuSVzV5cA4McphgmoCtajS (0)
|
||||
Data: []
|
||||
Status: Ok
|
||||
Fee: ◎0.000005
|
||||
Account 0 balance: ◎0.000005 -> ◎0
|
||||
Account 1 balance: ◎0.00001 -> ◎0.0000099
|
||||
Log Messages:
|
||||
Test message
|
||||
Rewards:
|
||||
Address Type Amount New Balance \0
|
||||
4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi rent -◎0.000000100 ◎0.000009900 \0
|
||||
"#.replace("\\0", "") // replace marker used to subvert trailing whitespace linter on CI
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_labeled_address() {
|
||||
|
@@ -3,7 +3,7 @@ authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
edition = "2021"
|
||||
name = "solana-cli"
|
||||
description = "Blockchain, Rebuilt for Scale"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
license = "Apache-2.0"
|
||||
homepage = "https://solana.com/"
|
||||
@@ -13,43 +13,43 @@ documentation = "https://docs.rs/solana-cli"
|
||||
bincode = "1.3.3"
|
||||
bs58 = "0.4.0"
|
||||
clap = "2.33.1"
|
||||
criterion-stats = "0.3.0"
|
||||
ctrlc = { version = "3.2.1", features = ["termination"] }
|
||||
console = "0.15.0"
|
||||
const_format = "0.2.22"
|
||||
criterion-stats = "0.3.0"
|
||||
crossbeam-channel = "0.5"
|
||||
log = "0.4.14"
|
||||
ctrlc = { version = "3.2.1", features = ["termination"] }
|
||||
humantime = "2.0.1"
|
||||
log = "0.4.14"
|
||||
num-traits = "0.2"
|
||||
pretty-hex = "0.2.1"
|
||||
reqwest = { version = "0.11.9", default-features = false, features = ["blocking", "rustls-tls", "json"] }
|
||||
semver = "1.0.5"
|
||||
semver = "1.0.6"
|
||||
serde = "1.0.136"
|
||||
serde_derive = "1.0.103"
|
||||
serde_json = "1.0.78"
|
||||
solana-account-decoder = { path = "../account-decoder", version = "=1.10.0" }
|
||||
solana-bpf-loader-program = { path = "../programs/bpf_loader", version = "=1.10.0" }
|
||||
solana-clap-utils = { path = "../clap-utils", version = "=1.10.0" }
|
||||
solana-cli-config = { path = "../cli-config", version = "=1.10.0" }
|
||||
solana-cli-output = { path = "../cli-output", version = "=1.10.0" }
|
||||
solana-client = { path = "../client", version = "=1.10.0" }
|
||||
solana-config-program = { path = "../programs/config", version = "=1.10.0" }
|
||||
solana-faucet = { path = "../faucet", version = "=1.10.0" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-program-runtime = { path = "../program-runtime", version = "=1.10.0" }
|
||||
serde_json = "1.0.79"
|
||||
solana-account-decoder = { path = "../account-decoder", version = "=1.10.2" }
|
||||
solana-bpf-loader-program = { path = "../programs/bpf_loader", version = "=1.10.2" }
|
||||
solana-clap-utils = { path = "../clap-utils", version = "=1.10.2" }
|
||||
solana-cli-config = { path = "../cli-config", version = "=1.10.2" }
|
||||
solana-cli-output = { path = "../cli-output", version = "=1.10.2" }
|
||||
solana-client = { path = "../client", version = "=1.10.2" }
|
||||
solana-config-program = { path = "../programs/config", version = "=1.10.2" }
|
||||
solana-faucet = { path = "../faucet", version = "=1.10.2" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
solana-program-runtime = { path = "../program-runtime", version = "=1.10.2" }
|
||||
solana-remote-wallet = { path = "../remote-wallet", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
|
||||
solana-version = { path = "../version", version = "=1.10.2" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.2" }
|
||||
solana_rbpf = "=0.2.24"
|
||||
solana-remote-wallet = { path = "../remote-wallet", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
|
||||
solana-version = { path = "../version", version = "=1.10.0" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.0" }
|
||||
spl-memo = { version = "=3.0.1", features = ["no-entrypoint"] }
|
||||
thiserror = "1.0.30"
|
||||
tiny-bip39 = "0.8.2"
|
||||
|
||||
[dev-dependencies]
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.0" }
|
||||
solana-test-validator = { path = "../test-validator", version = "=1.10.0" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.2" }
|
||||
solana-test-validator = { path = "../test-validator", version = "=1.10.2" }
|
||||
tempfile = "3.3.0"
|
||||
|
||||
[[bin]]
|
||||
|
@@ -88,6 +88,7 @@ pub enum CliCommand {
timeout: Duration,
blockhash: Option<Hash>,
print_timestamp: bool,
additional_fee: Option<u32>,
},
Rent {
data_length: usize,
@@ -977,6 +978,7 @@ pub fn process_command(config: &CliConfig) -> ProcessResult {
timeout,
blockhash,
print_timestamp,
additional_fee,
} => process_ping(
&rpc_client,
config,
@@ -985,6 +987,7 @@ pub fn process_command(config: &CliConfig) -> ProcessResult {
timeout,
blockhash,
*print_timestamp,
additional_fee,
),
CliCommand::Rent {
data_length,
@@ -33,12 +33,14 @@ use {
rpc_request::DELINQUENT_VALIDATOR_SLOT_DISTANCE,
rpc_response::SlotInfo,
},
solana_program_runtime::compute_budget::ComputeBudget,
solana_remote_wallet::remote_wallet::RemoteWalletManager,
solana_sdk::{
account::from_account,
account_utils::StateMut,
clock::{self, Clock, Slot},
commitment_config::CommitmentConfig,
compute_budget::ComputeBudgetInstruction,
epoch_schedule::Epoch,
hash::Hash,
message::Message,
@@ -269,6 +271,13 @@ impl ClusterQuerySubCommands for App<'_, '_> {
.default_value("15")
.help("Wait up to timeout seconds for transaction confirmation"),
)
.arg(
Arg::with_name("additional_fee")
.long("additional-fee")
.value_name("NUMBER")
.takes_value(true)
.help("Request additional-fee for transaction"),
)
.arg(blockhash_arg()),
)
.subcommand(
@@ -513,6 +522,7 @@ pub fn parse_cluster_ping(
let timeout = Duration::from_secs(value_t_or_exit!(matches, "timeout", u64));
let blockhash = value_of(matches, BLOCKHASH_ARG.name);
let print_timestamp = matches.is_present("print_timestamp");
let additional_fee = value_of(matches, "additional_fee");
Ok(CliCommandInfo {
command: CliCommand::Ping {
interval,
@@ -520,6 +530,7 @@ pub fn parse_cluster_ping(
timeout,
blockhash,
print_timestamp,
additional_fee,
},
signers: vec![default_signer.signer_from_path(matches, wallet_manager)?],
})
@@ -1350,6 +1361,7 @@ pub fn process_ping(
timeout: &Duration,
fixed_blockhash: &Option<Hash>,
print_timestamp: bool,
additional_fee: &Option<u32>,
) -> ProcessResult {
let (signal_sender, signal_receiver) = unbounded();
ctrlc::set_handler(move || {
@@ -1374,6 +1386,7 @@ pub fn process_ping(
blockhash_from_cluster = true;
}
}

'mainloop: for seq in 0..count.unwrap_or(std::u64::MAX) {
let now = Instant::now();
if fixed_blockhash.is_none() && now.duration_since(blockhash_acquired).as_secs() > 60 {
@@ -1388,8 +1401,18 @@ pub fn process_ping(
lamports += 1;

let build_message = |lamports| {
let ix = system_instruction::transfer(&config.signers[0].pubkey(), &to, lamports);
Message::new(&[ix], Some(&config.signers[0].pubkey()))
let mut ixs = vec![system_instruction::transfer(
&config.signers[0].pubkey(),
&to,
lamports,
)];
if let Some(additional_fee) = additional_fee {
ixs.push(ComputeBudgetInstruction::request_units(
ComputeBudget::new(false).max_units as u32,
*additional_fee,
));
}
Message::new(&ixs, Some(&config.signers[0].pubkey()))
};
let (message, _) = resolve_spend_tx_and_check_account_balance(
rpc_client,
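A minimal sketch of the message construction behind the new `--additional-fee` ping flag, mirroring the `build_message` closure in the hunk above. The standalone function shape and parameter names here are illustrative, not part of the diff; the `ComputeBudget`/`ComputeBudgetInstruction` calls are taken verbatim from the change.

    use solana_program_runtime::compute_budget::ComputeBudget;
    use solana_sdk::{
        compute_budget::ComputeBudgetInstruction, message::Message, pubkey::Pubkey,
        system_instruction,
    };

    // Illustrative helper: build the ping transfer, optionally attaching a
    // compute-budget instruction that requests max units plus the extra fee.
    fn ping_message(payer: &Pubkey, to: &Pubkey, lamports: u64, additional_fee: Option<u32>) -> Message {
        let mut ixs = vec![system_instruction::transfer(payer, to, lamports)];
        if let Some(additional_fee) = additional_fee {
            ixs.push(ComputeBudgetInstruction::request_units(
                ComputeBudget::new(false).max_units as u32,
                additional_fee,
            ));
        }
        Message::new(&ixs, Some(payer))
    }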
@@ -2019,6 +2042,7 @@ pub fn process_transaction_history(
RpcTransactionConfig {
encoding: Some(UiTransactionEncoding::Base64),
commitment: Some(CommitmentConfig::confirmed()),
max_supported_transaction_version: None,
},
) {
Ok(confirmed_transaction) => {
@@ -2028,7 +2052,7 @@ pub fn process_transaction_history(
.transaction
.decode()
.expect("Successful decode"),
&confirmed_transaction.transaction.meta,
confirmed_transaction.transaction.meta.as_ref(),
" ",
None,
None,
@@ -2312,6 +2336,7 @@ mod tests {
Hash::from_str("4CCNp28j6AhGq7PkjPDP4wbQWBS8LLbQin2xV5n8frKX").unwrap()
),
print_timestamp: true,
additional_fee: None,
},
signers: vec![default_keypair.into()],
}
@@ -39,7 +39,9 @@ use {
system_program,
transaction::Transaction,
},
solana_transaction_status::{Encodable, EncodedTransaction, UiTransactionEncoding},
solana_transaction_status::{
Encodable, EncodedTransaction, TransactionBinaryEncoding, UiTransactionEncoding,
},
std::{fmt::Write as FmtWrite, fs::File, io::Write, sync::Arc},
};

@@ -189,7 +191,7 @@ impl WalletSubCommands for App<'_, '_> {
Arg::with_name("encoding")
.index(2)
.value_name("ENCODING")
.possible_values(&["base58", "base64"]) // Subset of `UiTransactionEncoding` enum
.possible_values(&["base58", "base64"]) // Variants of `TransactionBinaryEncoding` enum
.default_value("base58")
.takes_value(true)
.required(true)
@@ -341,13 +343,13 @@ pub fn parse_balance(

pub fn parse_decode_transaction(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {
let blob = value_t_or_exit!(matches, "transaction", String);
let encoding = match matches.value_of("encoding").unwrap() {
"base58" => UiTransactionEncoding::Base58,
"base64" => UiTransactionEncoding::Base64,
let binary_encoding = match matches.value_of("encoding").unwrap() {
"base58" => TransactionBinaryEncoding::Base58,
"base64" => TransactionBinaryEncoding::Base64,
_ => unreachable!(),
};

let encoded_transaction = EncodedTransaction::Binary(blob, encoding);
let encoded_transaction = EncodedTransaction::Binary(blob, binary_encoding);
if let Some(transaction) = encoded_transaction.decode() {
Ok(CliCommandInfo {
command: CliCommand::DecodeTransaction(transaction),
@@ -559,6 +561,7 @@ pub fn process_confirm(
RpcTransactionConfig {
encoding: Some(UiTransactionEncoding::Base64),
commitment: Some(CommitmentConfig::confirmed()),
max_supported_transaction_version: None,
},
) {
Ok(confirmed_transaction) => {
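A small sketch of the decode path that `parse_decode_transaction` now uses with the `TransactionBinaryEncoding` enum, as shown in the hunk above. The helper name is illustrative and the blob is assumed to be a base58-encoded serialized transaction supplied by the caller.

    use solana_transaction_status::{EncodedTransaction, TransactionBinaryEncoding};

    // Returns true if the blob decodes into a transaction; `decode()` yields
    // `None` for malformed input, which the CLI turns into a parse error.
    fn is_decodable_base58_blob(blob: String) -> bool {
        EncodedTransaction::Binary(blob, TransactionBinaryEncoding::Base58)
            .decode()
            .is_some()
    }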
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-client-test"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "Solana RPC Test"
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
@@ -8,30 +8,31 @@ license = "Apache-2.0"
|
||||
homepage = "https://solana.com/"
|
||||
documentation = "https://docs.rs/solana-client-test"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[dependencies]
|
||||
futures-util = "0.3.19"
|
||||
serde_json = "1.0.78"
|
||||
serial_test = "0.5.1"
|
||||
solana-client = { path = "../client", version = "=1.10.0" }
|
||||
solana-ledger = { path = "../ledger", version = "=1.10.0" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.0" }
|
||||
solana-merkle-tree = { path = "../merkle-tree", version = "=1.10.0" }
|
||||
solana-metrics = { path = "../metrics", version = "=1.10.0" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.0" }
|
||||
solana-rayon-threadlimit = { path = "../rayon-threadlimit", version = "=1.10.0" }
|
||||
solana-rpc = { path = "../rpc", version = "=1.10.0" }
|
||||
solana-runtime = { path = "../runtime", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.0" }
|
||||
solana-test-validator = { path = "../test-validator", version = "=1.10.0" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
|
||||
solana-version = { path = "../version", version = "=1.10.0" }
|
||||
futures-util = "0.3.21"
|
||||
serde_json = "1.0.79"
|
||||
serial_test = "0.6.0"
|
||||
solana-client = { path = "../client", version = "=1.10.2" }
|
||||
solana-ledger = { path = "../ledger", version = "=1.10.2" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.2" }
|
||||
solana-merkle-tree = { path = "../merkle-tree", version = "=1.10.2" }
|
||||
solana-metrics = { path = "../metrics", version = "=1.10.2" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.2" }
|
||||
solana-rayon-threadlimit = { path = "../rayon-threadlimit", version = "=1.10.2" }
|
||||
solana-rpc = { path = "../rpc", version = "=1.10.2" }
|
||||
solana-runtime = { path = "../runtime", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.2" }
|
||||
solana-test-validator = { path = "../test-validator", version = "=1.10.2" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
|
||||
solana-version = { path = "../version", version = "=1.10.2" }
|
||||
systemstat = "0.1.10"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
|
||||
[dev-dependencies]
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
@@ -15,7 +15,7 @@ use {
solana_ledger::{blockstore::Blockstore, get_tmp_ledger_path},
solana_rpc::{
optimistically_confirmed_bank_tracker::OptimisticallyConfirmedBank,
rpc::create_test_transactions_and_populate_blockstore,
rpc::{create_test_transaction_entries, populate_blockstore_for_tests},
rpc_pubsub_service::{PubSubConfig, PubSubService},
rpc_subscriptions::RpcSubscriptions,
},
@@ -36,7 +36,9 @@ use {
},
solana_streamer::socket::SocketAddrSpace,
solana_test_validator::TestValidator,
solana_transaction_status::{ConfirmedBlock, TransactionDetails, UiTransactionEncoding},
solana_transaction_status::{
BlockEncodingOptions, ConfirmedBlock, TransactionDetails, UiTransactionEncoding,
},
std::{
collections::HashSet,
net::{IpAddr, SocketAddr},
@@ -230,9 +232,12 @@ fn test_block_subscription() {
let max_complete_transaction_status_slot = Arc::new(AtomicU64::new(blockstore.max_root()));
bank.transfer(rent_exempt_amount, &alice, &keypair2.pubkey())
.unwrap();
let _confirmed_block_signatures = create_test_transactions_and_populate_blockstore(
vec![&alice, &keypair1, &keypair2, &keypair3],
0,
populate_blockstore_for_tests(
create_test_transaction_entries(
vec![&alice, &keypair1, &keypair2, &keypair3],
bank.clone(),
)
.0,
bank,
blockstore.clone(),
max_complete_transaction_status_slot,
@@ -270,6 +275,7 @@ fn test_block_subscription() {
encoding: Some(UiTransactionEncoding::Json),
transaction_details: Some(TransactionDetails::Signatures),
show_rewards: None,
max_supported_transaction_version: None,
}),
)
.unwrap();
@@ -281,14 +287,17 @@ fn test_block_subscription() {
match maybe_actual {
Ok(actual) => {
let versioned_block = blockstore.get_complete_block(slot, false).unwrap();
let legacy_block = ConfirmedBlock::from(versioned_block)
.into_legacy_block()
let confirmed_block = ConfirmedBlock::from(versioned_block);
let block = confirmed_block
.encode_with_options(
UiTransactionEncoding::Json,
BlockEncodingOptions {
transaction_details: TransactionDetails::Signatures,
show_rewards: false,
max_supported_transaction_version: None,
},
)
.unwrap();
let block = legacy_block.configure(
UiTransactionEncoding::Json,
TransactionDetails::Signatures,
false,
);
assert_eq!(actual.value.slot, slot);
assert!(block.eq(&actual.value.block.unwrap()));
}
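A hedged sketch of the new block-encoding path that `test_block_subscription` exercises above: a `ConfirmedBlock` is encoded directly with `BlockEncodingOptions` instead of first converting to a legacy block. The wrapper function and the assumption that the error type is `EncodeError` are illustrative; the option values match the test.

    use solana_transaction_status::{
        BlockEncodingOptions, ConfirmedBlock, EncodeError, TransactionDetails, UiTransactionEncoding,
    };

    // Illustrative: encode a block the way the pubsub test does. With
    // `max_supported_transaction_version: None`, encoding is expected to fail
    // for blocks that contain versioned transactions.
    fn encode_block(confirmed_block: ConfirmedBlock) -> Result<(), EncodeError> {
        let encoded = confirmed_block.encode_with_options(
            UiTransactionEncoding::Json,
            BlockEncodingOptions {
                transaction_details: TransactionDetails::Signatures,
                show_rewards: false,
                max_supported_transaction_version: None,
            },
        )?;
        let _ = encoded; // the UI-friendly block sent to subscribers
        Ok(())
    }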
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-client"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "Solana Client"
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
@@ -10,31 +10,37 @@ license = "Apache-2.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
async-mutex = "1.4.0"
|
||||
async-trait = "0.1.52"
|
||||
base64 = "0.13.0"
|
||||
bincode = "1.3.3"
|
||||
bs58 = "0.4.0"
|
||||
bytes = "1.1.0"
|
||||
clap = "2.33.0"
|
||||
crossbeam-channel = "0.5"
|
||||
futures-util = "0.3.19"
|
||||
futures = "0.3"
|
||||
futures-util = "0.3.21"
|
||||
indicatif = "0.16.2"
|
||||
itertools = "0.10.2"
|
||||
jsonrpc-core = "18.0.0"
|
||||
log = "0.4.14"
|
||||
quinn = "0.8.0"
|
||||
rayon = "1.5.1"
|
||||
reqwest = { version = "0.11.9", default-features = false, features = ["blocking", "rustls-tls", "json"] }
|
||||
semver = "1.0.5"
|
||||
rustls = { version = "0.20.2", features = ["dangerous_configuration"] }
|
||||
semver = "1.0.6"
|
||||
serde = "1.0.136"
|
||||
serde_derive = "1.0.103"
|
||||
serde_json = "1.0.78"
|
||||
solana-account-decoder = { path = "../account-decoder", version = "=1.10.0" }
|
||||
solana-clap-utils = { path = "../clap-utils", version = "=1.10.0" }
|
||||
solana-faucet = { path = "../faucet", version = "=1.10.0" }
|
||||
solana-net-utils = { path = "../net-utils", version = "=1.10.0" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
|
||||
solana-version = { path = "../version", version = "=1.10.0" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.0" }
|
||||
serde_json = "1.0.79"
|
||||
solana-account-decoder = { path = "../account-decoder", version = "=1.10.2" }
|
||||
solana-clap-utils = { path = "../clap-utils", version = "=1.10.2" }
|
||||
solana-faucet = { path = "../faucet", version = "=1.10.2" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.2" }
|
||||
solana-net-utils = { path = "../net-utils", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
|
||||
solana-version = { path = "../version", version = "=1.10.2" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.2" }
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-stream = "0.1.8"
|
||||
@@ -45,7 +51,7 @@ url = "2.2.2"
|
||||
[dev-dependencies]
|
||||
assert_matches = "1.5.0"
|
||||
jsonrpc-http-server = "18.0.0"
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
@@ -1,6 +1,7 @@
pub use reqwest;
use {
crate::{rpc_request, rpc_response},
quinn::{ConnectError, WriteError},
solana_faucet::faucet::FaucetError,
solana_sdk::{
signature::SignerError, transaction::TransactionError, transport::TransportError,
@@ -72,6 +73,18 @@ impl From<ClientErrorKind> for TransportError {
}
}

impl From<WriteError> for ClientErrorKind {
fn from(write_error: WriteError) -> Self {
Self::Custom(format!("{:?}", write_error))
}
}

impl From<ConnectError> for ClientErrorKind {
fn from(connect_error: ConnectError) -> Self {
Self::Custom(format!("{:?}", connect_error))
}
}

#[derive(Error, Debug)]
#[error("{kind}")]
pub struct ClientError {
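A tiny illustration (not part of the diff) of what the new conversions above buy: quinn's `WriteError` and `ConnectError` can now be folded into `ClientErrorKind`, where they surface as `ClientErrorKind::Custom` carrying the debug-formatted error.

    use quinn::WriteError;
    use solana_client::client_error::ClientErrorKind;

    // Equivalent to the `From` impl above; shown only to make the mapping concrete.
    fn quic_write_error_to_client_error(err: WriteError) -> ClientErrorKind {
        ClientErrorKind::from(err)
    }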
@@ -10,6 +10,7 @@ pub mod nonblocking;
pub mod nonce_utils;
pub mod perf_utils;
pub mod pubsub_client;
pub mod quic_client;
pub mod rpc_cache;
pub mod rpc_client;
pub mod rpc_config;
@@ -18,11 +19,13 @@ pub mod rpc_deprecated_config;
pub mod rpc_filter;
pub mod rpc_request;
pub mod rpc_response;
pub(crate) mod rpc_sender;
pub mod rpc_sender;
pub mod spinner;
pub mod thin_client;
pub mod tpu_client;
pub mod tpu_connection;
pub mod transaction_executor;
pub mod udp_client;

pub mod mock_sender_for_cli {
/// Magic `SIGNATURE` value used by `solana-cli` unit tests.
@@ -28,13 +28,13 @@ use {
pubkey::Pubkey,
signature::Signature,
sysvar::epoch_schedule::EpochSchedule,
transaction::{self, Transaction, TransactionError},
transaction::{self, Transaction, TransactionError, TransactionVersion},
},
solana_transaction_status::{
EncodedConfirmedBlock, EncodedConfirmedTransactionWithStatusMeta, EncodedTransaction,
EncodedTransactionWithStatusMeta, Rewards, TransactionConfirmationStatus,
TransactionStatus, UiCompiledInstruction, UiMessage, UiRawMessage, UiTransaction,
UiTransactionEncoding, UiTransactionStatusMeta,
EncodedTransactionWithStatusMeta, Rewards, TransactionBinaryEncoding,
TransactionConfirmationStatus, TransactionStatus, UiCompiledInstruction, UiMessage,
UiRawMessage, UiTransaction, UiTransactionStatusMeta,
},
solana_version::Version,
std::{collections::HashMap, net::SocketAddr, str::FromStr, sync::RwLock},
@@ -192,6 +192,7 @@ impl RpcSender for MockSender {
"getTransaction" => serde_json::to_value(EncodedConfirmedTransactionWithStatusMeta {
slot: 2,
transaction: EncodedTransactionWithStatusMeta {
version: Some(TransactionVersion::LEGACY),
transaction: EncodedTransaction::Json(
UiTransaction {
signatures: vec!["3AsdoALgZFuq2oUVWrDYhg2pNeaLJKPLf8hU2mQ6U8qJxeJ6hsrPVpMn9ma39DtfYCrDQSvngWRP8NnTpEhezJpE".to_string()],
@@ -213,6 +214,7 @@ impl RpcSender for MockSender {
accounts: vec![0, 1],
data: "3Bxs49DitAvXtoDR".to_string(),
}],
address_table_lookups: None,
})
}),
meta: Some(UiTransactionStatusMeta {
@@ -226,6 +228,7 @@ impl RpcSender for MockSender {
pre_token_balances: None,
post_token_balances: None,
rewards: None,
loaded_addresses: None,
}),
},
block_time: Some(1628633791),
@@ -378,9 +381,10 @@ impl RpcSender for MockSender {
pLHxcaShD81xBNaFDgnA2nkkdHnKtZt4hVSfKAmw3VRZbjrZ7L2fKZBx21CwsG\
hD6onjM2M3qZW5C8J6d1pj41MxKmZgPBSha3MyKkNLkAGFASK"
.to_string(),
UiTransactionEncoding::Base58,
TransactionBinaryEncoding::Base58,
),
meta: None,
version: Some(TransactionVersion::LEGACY),
}],
rewards: Rewards::new(),
block_time: None,
@@ -6,6 +6,7 @@
//!
//! [JSON-RPC]: https://www.jsonrpc.org/specification

pub use crate::mock_sender::Mocks;
#[allow(deprecated)]
use crate::rpc_deprecated_config::{
RpcConfirmedBlockConfig, RpcConfirmedTransactionConfig,
@@ -15,7 +16,7 @@ use {
crate::{
client_error::{ClientError, ClientErrorKind, Result as ClientResult},
http_sender::HttpSender,
mock_sender::{MockSender, Mocks},
mock_sender::MockSender,
rpc_client::{GetConfirmedSignaturesForAddress2Config, RpcClientConfig},
rpc_config::{RpcAccountInfoConfig, *},
rpc_request::{RpcError, RpcRequest, RpcResponseErrorData, TokenAccountsFilter},
@@ -146,9 +147,9 @@ impl RpcClient {
///
/// This is the basic constructor, allowing construction with any type of
/// `RpcSender`. Most applications should use one of the other constructors,
/// such as [`new`] and [`new_mock`], which create an `RpcClient`
/// encapsulating an [`HttpSender`] and [`MockSender`] respectively.
pub(crate) fn new_sender<T: RpcSender + Send + Sync + 'static>(
/// such as [`RpcClient::new`], [`RpcClient::new_with_commitment`] or
/// [`RpcClient::new_with_timeout`].
pub fn new_sender<T: RpcSender + Send + Sync + 'static>(
sender: T,
config: RpcClientConfig,
) -> Self {
@@ -314,8 +315,34 @@ impl RpcClient {

/// Create a mock `RpcClient`.
///
/// See the [`MockSender`] documentation for an explanation of
/// how it treats the `url` argument.
/// A mock `RpcClient` contains an implementation of [`RpcSender`] that does
/// not use the network, and instead returns synthetic responses, for use in
/// tests.
///
/// It is primarily for internal use, with limited customizability, and
/// behaviors determined by internal Solana test cases. New users should
/// consider implementing `RpcSender` themselves and constructing
/// `RpcClient` with [`RpcClient::new_sender`] to get mock behavior.
///
/// Unless directed otherwise, a mock `RpcClient` will generally return a
/// reasonable default response to any request, at least for [`RpcRequest`]
/// values for which responses have been implemented.
///
/// This mock can be customized by changing the `url` argument, which is not
/// actually a URL, but a simple string directive that changes the mock
/// behavior in specific scenarios:
///
/// - It is customary to set the `url` to "succeeds" for mocks that should
/// return successfully, though this value is not actually interpreted.
///
/// - If `url` is "fails" then any call to `send` will return `Ok(Value::Null)`.
///
/// - Other possible values of `url` are specific to different `RpcRequest`
/// values. Read the implementation of (non-public) `MockSender` for
/// details.
///
/// The [`RpcClient::new_mock_with_mocks`] function offers further
/// customization options.
///
/// # Examples
///
@@ -341,8 +368,43 @@ impl RpcClient {

/// Create a mock `RpcClient`.
///
/// See the [`MockSender`] documentation for an explanation of how it treats
/// the `url` argument.
/// A mock `RpcClient` contains an implementation of [`RpcSender`] that does
/// not use the network, and instead returns synthetic responses, for use in
/// tests.
///
/// It is primarily for internal use, with limited customizability, and
/// behaviors determined by internal Solana test cases. New users should
/// consider implementing `RpcSender` themselves and constructing
/// `RpcClient` with [`RpcClient::new_sender`] to get mock behavior.
///
/// Unless directed otherwise, a mock `RpcClient` will generally return a
/// reasonable default response to any request, at least for [`RpcRequest`]
/// values for which responses have been implemented.
///
/// This mock can be customized in two ways:
///
/// 1) By changing the `url` argument, which is not actually a URL, but a
/// simple string directive that changes the mock behavior in specific
/// scenarios.
///
/// It is customary to set the `url` to "succeeds" for mocks that should
/// return successfully, though this value is not actually interpreted.
///
/// If `url` is "fails" then any call to `send` will return `Ok(Value::Null)`.
///
/// Other possible values of `url` are specific to different `RpcRequest`
/// values. Read the implementation of `MockSender` (which is non-public)
/// for details.
///
/// 2) Custom responses can be configured by providing [`Mocks`]. This type
/// is a [`HashMap`] from [`RpcRequest`] to a JSON [`Value`] response.
/// Any entries in this map override the default behavior for the given
/// request.
///
/// The [`RpcClient::new_mock_with_mocks`] function offers further
/// customization options.
///
/// [`HashMap`]: std::collections::HashMap
///
/// # Examples
///
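A minimal sketch of the two mock-customization paths described in the doc comments above. The "succeeds" directive and the `Mocks` override come from those comments; the particular request/response pair (a `getBalance` value) and the serde_json dependency are illustrative assumptions.

    use serde_json::json;
    use solana_client::{
        nonblocking::rpc_client::{Mocks, RpcClient},
        rpc_request::RpcRequest,
    };

    fn mock_clients() {
        // Default mock: built-in synthetic responses for implemented requests.
        let _rpc = RpcClient::new_mock("succeeds".to_string());

        // Override a single request with a custom JSON response.
        let mut mocks = Mocks::default();
        mocks.insert(
            RpcRequest::GetBalance,
            json!({ "context": { "slot": 1 }, "value": 50u64 }), // illustrative shape
        );
        let _rpc = RpcClient::new_mock_with_mocks("succeeds".to_string(), mocks);
    }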
@@ -2411,6 +2473,7 @@ impl RpcClient {
/// transaction_details: Some(TransactionDetails::None),
/// rewards: Some(true),
/// commitment: None,
/// max_supported_transaction_version: Some(0),
/// };
/// let block = rpc_client.get_block_with_config(
/// slot,
@@ -3051,6 +3114,7 @@ impl RpcClient {
/// let config = RpcTransactionConfig {
/// encoding: Some(UiTransactionEncoding::Json),
/// commitment: Some(CommitmentConfig::confirmed()),
/// max_supported_transaction_version: Some(0),
/// };
/// let transaction = rpc_client.get_transaction_with_config(
/// &signature,
client/src/quic_client.rs (new file, 208 lines added)
@@ -0,0 +1,208 @@
//! Simple client that connects to a given UDP port with the QUIC protocol and provides
//! an interface for sending transactions which is restricted by the server's flow control.

use {
crate::{client_error::ClientErrorKind, tpu_connection::TpuConnection},
async_mutex::Mutex,
futures::future::join_all,
itertools::Itertools,
quinn::{ClientConfig, Endpoint, EndpointConfig, NewConnection, WriteError},
rayon::iter::{IntoParallelIterator, ParallelIterator},
solana_sdk::{
quic::{QUIC_MAX_CONCURRENT_STREAMS, QUIC_PORT_OFFSET},
transaction::Transaction,
transport::Result as TransportResult,
},
std::{
net::{SocketAddr, UdpSocket},
sync::Arc,
},
tokio::runtime::Runtime,
};

struct SkipServerVerification;

impl SkipServerVerification {
pub fn new() -> Arc<Self> {
Arc::new(Self)
}
}

impl rustls::client::ServerCertVerifier for SkipServerVerification {
fn verify_server_cert(
&self,
_end_entity: &rustls::Certificate,
_intermediates: &[rustls::Certificate],
_server_name: &rustls::ServerName,
_scts: &mut dyn Iterator<Item = &[u8]>,
_ocsp_response: &[u8],
_now: std::time::SystemTime,
) -> Result<rustls::client::ServerCertVerified, rustls::Error> {
Ok(rustls::client::ServerCertVerified::assertion())
}
}

struct QuicClient {
runtime: Runtime,
endpoint: Endpoint,
connection: Arc<Mutex<Option<Arc<NewConnection>>>>,
addr: SocketAddr,
}

pub struct QuicTpuConnection {
client: Arc<QuicClient>,
}

impl TpuConnection for QuicTpuConnection {
fn new(client_socket: UdpSocket, tpu_addr: SocketAddr) -> Self {
let tpu_addr = SocketAddr::new(tpu_addr.ip(), tpu_addr.port() + QUIC_PORT_OFFSET);
let client = Arc::new(QuicClient::new(client_socket, tpu_addr));

Self { client }
}

fn tpu_addr(&self) -> &SocketAddr {
&self.client.addr
}

fn send_wire_transaction(&self, data: Vec<u8>) -> TransportResult<()> {
let _guard = self.client.runtime.enter();
let send_buffer = self.client.send_buffer(&data[..]);
self.client.runtime.block_on(send_buffer)?;
Ok(())
}

fn send_batch(&self, transactions: Vec<Transaction>) -> TransportResult<()> {
let buffers = transactions
.into_par_iter()
.map(|tx| bincode::serialize(&tx).expect("serialize Transaction in send_batch"))
.collect::<Vec<_>>();

let _guard = self.client.runtime.enter();
let send_batch = self.client.send_batch(&buffers[..]);
self.client.runtime.block_on(send_batch)?;
Ok(())
}
}

impl QuicClient {
pub fn new(client_socket: UdpSocket, addr: SocketAddr) -> Self {
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();

let _guard = runtime.enter();

let crypto = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_custom_certificate_verifier(SkipServerVerification::new())
.with_no_client_auth();

let create_endpoint = QuicClient::create_endpoint(EndpointConfig::default(), client_socket);

let mut endpoint = runtime.block_on(create_endpoint);

endpoint.set_default_client_config(ClientConfig::new(Arc::new(crypto)));

Self {
runtime,
endpoint,
connection: Arc::new(Mutex::new(None)),
addr,
}
}

// If this function becomes public, it should be changed to
// not expose details of the specific Quic implementation we're using
async fn create_endpoint(config: EndpointConfig, client_socket: UdpSocket) -> Endpoint {
quinn::Endpoint::new(config, None, client_socket).unwrap().0
}

async fn _send_buffer_using_conn(
data: &[u8],
connection: &NewConnection,
) -> Result<(), WriteError> {
let mut send_stream = connection.connection.open_uni().await?;
send_stream.write_all(data).await?;
send_stream.finish().await?;
Ok(())
}

// Attempts to send data, connecting/reconnecting as necessary
// On success, returns the connection used to successfully send the data
async fn _send_buffer(&self, data: &[u8]) -> Result<Arc<NewConnection>, WriteError> {
let connection = {
let mut conn_guard = self.connection.lock().await;

let maybe_conn = (*conn_guard).clone();
match maybe_conn {
Some(conn) => conn.clone(),
None => {
let connecting = self.endpoint.connect(self.addr, "connect").unwrap();
let connection = Arc::new(connecting.await?);
*conn_guard = Some(connection.clone());
connection
}
}
};
match Self::_send_buffer_using_conn(data, &connection).await {
Ok(()) => Ok(connection),
_ => {
let connection = {
let connecting = self.endpoint.connect(self.addr, "connect").unwrap();
let connection = Arc::new(connecting.await?);
let mut conn_guard = self.connection.lock().await;
*conn_guard = Some(connection.clone());
connection
};
Self::_send_buffer_using_conn(data, &connection).await?;
Ok(connection)
}
}
}

pub async fn send_buffer(&self, data: &[u8]) -> Result<(), ClientErrorKind> {
self._send_buffer(data).await?;
Ok(())
}

pub async fn send_batch(&self, buffers: &[Vec<u8>]) -> Result<(), ClientErrorKind> {
// Start off by "testing" the connection by sending the first transaction
// This will also connect to the server if not already connected
// and reconnect and retry if the first send attempt failed
// (for example due to a timed out connection), returning an error
// or the connection that was used to successfully send the transaction.
// We will use the returned connection to send the rest of the transactions in the batch
// to avoid touching the mutex in self, and not bother reconnecting if we fail along the way
// since testing even in the ideal GCE environment has found no cases
// where reconnecting and retrying in the middle of a batch send
// (i.e. we encounter a connection error in the middle of a batch send, which presumably cannot
// be due to a timed out connection) has succeeded
if buffers.is_empty() {
return Ok(());
}
let connection = self._send_buffer(&buffers[0][..]).await?;

// Used to avoid dereferencing the Arc multiple times below
// by just getting a reference to the NewConnection once
let connection_ref: &NewConnection = &connection;

let chunks = buffers[1..buffers.len()]
.iter()
.chunks(QUIC_MAX_CONCURRENT_STREAMS);

let futures = chunks.into_iter().map(|buffs| {
join_all(
buffs
.into_iter()
.map(|buf| Self::_send_buffer_using_conn(&buf[..], connection_ref)),
)
});

for f in futures {
f.await.into_iter().try_for_each(|res| res)?;
}
Ok(())
}
}
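A usage sketch for the new `QuicTpuConnection` above. The bind address, TPU address, and placeholder transaction are illustrative; `TpuConnection::new` shifts the TPU port by `QUIC_PORT_OFFSET` itself, as the file shows.

    use solana_client::{quic_client::QuicTpuConnection, tpu_connection::TpuConnection};
    use solana_sdk::{
        hash::Hash,
        signature::{Keypair, Signer},
        system_transaction,
    };
    use std::net::{SocketAddr, UdpSocket};

    // Illustrative: open a QUIC TPU connection and push one transaction through
    // `send_batch`, which serializes and streams it over a unidirectional stream.
    fn send_self_transfer(tpu_addr: SocketAddr) -> solana_sdk::transport::Result<()> {
        let socket = UdpSocket::bind("0.0.0.0:0").expect("bind local UDP socket");
        let connection = QuicTpuConnection::new(socket, tpu_addr);

        let payer = Keypair::new();
        // Placeholder blockhash; a real caller would fetch a recent one first.
        let tx = system_transaction::transfer(&payer, &payer.pubkey(), 1, Hash::default());
        connection.send_batch(vec![tx])
    }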
@@ -6,13 +6,14 @@
|
||||
//!
|
||||
//! [JSON-RPC]: https://www.jsonrpc.org/specification
|
||||
|
||||
pub use crate::mock_sender::Mocks;
|
||||
#[allow(deprecated)]
|
||||
use crate::rpc_deprecated_config::{RpcConfirmedBlockConfig, RpcConfirmedTransactionConfig};
|
||||
use {
|
||||
crate::{
|
||||
client_error::Result as ClientResult,
|
||||
http_sender::HttpSender,
|
||||
mock_sender::{MockSender, Mocks},
|
||||
mock_sender::MockSender,
|
||||
nonblocking::{self, rpc_client::get_rpc_request_str},
|
||||
rpc_config::{RpcAccountInfoConfig, *},
|
||||
rpc_request::{RpcRequest, TokenAccountsFilter},
|
||||
@@ -103,8 +104,8 @@ pub struct GetConfirmedSignaturesForAddress2Config {
|
||||
/// [`Processed`] commitment level. These exceptions are noted in the method
|
||||
/// documentation.
|
||||
///
|
||||
/// [`Finalized`]: CommitmentLevel::Finalized
|
||||
/// [`Processed`]: CommitmentLevel::Processed
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
/// [`Processed`]: solana_sdk::commitment_config::CommitmentLevel::Processed
|
||||
/// [jsonprot]: https://docs.solana.com/developing/clients/jsonrpc-api
|
||||
/// [JSON-RPC]: https://www.jsonrpc.org/specification
|
||||
/// [slots]: https://docs.solana.com/terminology#slot
|
||||
@@ -145,6 +146,10 @@ pub struct GetConfirmedSignaturesForAddress2Config {
|
||||
/// [`is_timeout`](crate::client_error::reqwest::Error::is_timeout) method
|
||||
/// returns `true`. The default timeout is 30 seconds, and may be changed by
|
||||
/// calling an appropriate constructor with a `timeout` parameter.
|
||||
///
|
||||
/// [`ClientError`]: crate::client_error::ClientError
|
||||
/// [`ClientErrorKind`]: crate::client_error::ClientErrorKind
|
||||
/// [`ClientErrorKind::Reqwest`]: crate::client_error::ClientErrorKind::Reqwest
|
||||
pub struct RpcClient {
|
||||
rpc_client: nonblocking::rpc_client::RpcClient,
|
||||
runtime: Option<tokio::runtime::Runtime>,
|
||||
@@ -161,9 +166,9 @@ impl RpcClient {
|
||||
///
|
||||
/// This is the basic constructor, allowing construction with any type of
|
||||
/// `RpcSender`. Most applications should use one of the other constructors,
|
||||
/// such as [`new`] and [`new_mock`], which create an `RpcClient`
|
||||
/// encapsulating an [`HttpSender`] and [`MockSender`] respectively.
|
||||
fn new_sender<T: RpcSender + Send + Sync + 'static>(
|
||||
/// such as [`RpcClient::new`], [`RpcClient::new_with_commitment`] or
|
||||
/// [`RpcClient::new_with_timeout`].
|
||||
pub fn new_sender<T: RpcSender + Send + Sync + 'static>(
|
||||
sender: T,
|
||||
config: RpcClientConfig,
|
||||
) -> Self {
|
||||
@@ -186,9 +191,10 @@ impl RpcClient {
|
||||
/// "http://localhost:8899".
|
||||
///
|
||||
/// The client has a default timeout of 30 seconds, and a default [commitment
|
||||
/// level][cl] of [`Finalized`](CommitmentLevel::Finalized).
|
||||
/// level][cl] of [`Finalized`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@@ -211,6 +217,8 @@ impl RpcClient {
|
||||
/// The client has a default timeout of 30 seconds, and a user-specified
|
||||
/// [`CommitmentLevel`] via [`CommitmentConfig`].
|
||||
///
|
||||
/// [`CommitmentLevel`]: solana_sdk::commitment_config::CommitmentLevel
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
@@ -233,9 +241,10 @@ impl RpcClient {
|
||||
/// "http://localhost:8899".
|
||||
///
|
||||
/// The client has a default [commitment level][cl] of
|
||||
/// [`Finalized`](CommitmentLevel::Finalized).
|
||||
/// [`Finalized`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@@ -335,8 +344,34 @@ impl RpcClient {
|
||||
|
||||
/// Create a mock `RpcClient`.
|
||||
///
|
||||
/// See the [`MockSender`] documentation for an explanation of
|
||||
/// how it treats the `url` argument.
|
||||
/// A mock `RpcClient` contains an implementation of [`RpcSender`] that does
|
||||
/// not use the network, and instead returns synthetic responses, for use in
|
||||
/// tests.
|
||||
///
|
||||
/// It is primarily for internal use, with limited customizability, and
|
||||
/// behaviors determined by internal Solana test cases. New users should
|
||||
/// consider implementing `RpcSender` themselves and constructing
|
||||
/// `RpcClient` with [`RpcClient::new_sender`] to get mock behavior.
|
||||
///
|
||||
/// Unless directed otherwise, a mock `RpcClient` will generally return a
|
||||
/// reasonable default response to any request, at least for [`RpcRequest`]
|
||||
/// values for which responses have been implemented.
|
||||
///
|
||||
/// This mock can be customized by changing the `url` argument, which is not
|
||||
/// actually a URL, but a simple string directive that changes the mock
|
||||
/// behavior in specific scenarios:
|
||||
///
|
||||
/// - It is customary to set the `url` to "succeeds" for mocks that should
|
||||
/// return successfully, though this value is not actually interpreted.
|
||||
///
|
||||
/// - If `url` is "fails" then any call to `send` will return `Ok(Value::Null)`.
|
||||
///
|
||||
/// - Other possible values of `url` are specific to different `RpcRequest`
|
||||
/// values. Read the implementation of (non-public) `MockSender` for
|
||||
/// details.
|
||||
///
|
||||
/// The [`RpcClient::new_mock_with_mocks`] function offers further
|
||||
/// customization options.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@@ -362,8 +397,43 @@ impl RpcClient {
|
||||
|
||||
/// Create a mock `RpcClient`.
|
||||
///
|
||||
/// See the [`MockSender`] documentation for an explanation of how it treats
|
||||
/// the `url` argument.
|
||||
/// A mock `RpcClient` contains an implementation of [`RpcSender`] that does
|
||||
/// not use the network, and instead returns synthetic responses, for use in
|
||||
/// tests.
|
||||
///
|
||||
/// It is primarily for internal use, with limited customizability, and
|
||||
/// behaviors determined by internal Solana test cases. New users should
|
||||
/// consider implementing `RpcSender` themselves and constructing
|
||||
/// `RpcClient` with [`RpcClient::new_sender`] to get mock behavior.
|
||||
///
|
||||
/// Unless directed otherwise, a mock `RpcClient` will generally return a
|
||||
/// reasonable default response to any request, at least for [`RpcRequest`]
|
||||
/// values for which responses have been implemented.
|
||||
///
|
||||
/// This mock can be customized in two ways:
|
||||
///
|
||||
/// 1) By changing the `url` argument, which is not actually a URL, but a
|
||||
/// simple string directive that changes the mock behavior in specific
|
||||
/// scenarios.
|
||||
///
|
||||
/// It is customary to set the `url` to "succeeds" for mocks that should
|
||||
/// return successfully, though this value is not actually interpreted.
|
||||
///
|
||||
/// If `url` is "fails" then any call to `send` will return `Ok(Value::Null)`.
|
||||
///
|
||||
/// Other possible values of `url` are specific to different `RpcRequest`
|
||||
/// values. Read the implementation of `MockSender` (which is non-public)
|
||||
/// for details.
|
||||
///
|
||||
/// 2) Custom responses can be configured by providing [`Mocks`]. This type
|
||||
/// is a [`HashMap`] from [`RpcRequest`] to a JSON [`Value`] response.
|
||||
/// Any entries in this map override the default behavior for the given
|
||||
/// request.
|
||||
///
|
||||
/// The [`RpcClient::new_mock_with_mocks`] function offers further
|
||||
/// customization options.
|
||||
///
|
||||
/// [`HashMap`]: std::collections::HashMap
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@@ -397,9 +467,10 @@ impl RpcClient {
|
||||
/// Create an HTTP `RpcClient` from a [`SocketAddr`].
|
||||
///
|
||||
/// The client has a default timeout of 30 seconds, and a default [commitment
|
||||
/// level][cl] of [`Finalized`](CommitmentLevel::Finalized).
|
||||
/// level][cl] of [`Finalized`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@@ -420,6 +491,8 @@ impl RpcClient {
|
||||
/// The client has a default timeout of 30 seconds, and a user-specified
|
||||
/// [`CommitmentLevel`] via [`CommitmentConfig`].
|
||||
///
|
||||
/// [`CommitmentLevel`]: solana_sdk::commitment_config::CommitmentLevel
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
@@ -442,9 +515,10 @@ impl RpcClient {
|
||||
|
||||
/// Create an HTTP `RpcClient` from a [`SocketAddr`] with specified timeout.
|
||||
///
|
||||
/// The client has a default [commitment level][cl] of [`Finalized`](CommitmentLevel::Finalized).
|
||||
/// The client has a default [commitment level][cl] of [`Finalized`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@@ -469,7 +543,9 @@ impl RpcClient {
|
||||
/// determines how thoroughly committed a transaction must be when waiting
|
||||
/// for its confirmation or otherwise checking for confirmation. If not
|
||||
/// specified, the default commitment level is
|
||||
/// [`Finalized`](CommitmentLevel::Finalized).
|
||||
/// [`Finalized`].
|
||||
///
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
///
|
||||
/// The default commitment level is overridden when calling methods that
|
||||
/// explicitly provide a [`CommitmentConfig`], like
|
||||
@@ -503,7 +579,8 @@ impl RpcClient {
|
||||
/// containing an [`RpcResponseError`] with `code` set to
|
||||
/// [`JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY`].
|
||||
///
|
||||
/// [`RpcResponseError`]: RpcError::RpcResponseError
|
||||
/// [`RpcError`]: crate::rpc_request::RpcError
|
||||
/// [`RpcResponseError`]: crate::rpc_request::RpcError::RpcResponseError
|
||||
/// [`JSON_RPC_SERVER_ERROR_TRANSACTION_SIGNATURE_VERIFICATION_FAILURE`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_TRANSACTION_SIGNATURE_VERIFICATION_FAILURE
|
||||
/// [`JSON_RPC_SERVER_ERROR_SEND_TRANSACTION_PREFLIGHT_FAILURE`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_SEND_TRANSACTION_PREFLIGHT_FAILURE
|
||||
/// [`JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY
|
||||
@@ -616,7 +693,8 @@ impl RpcClient {
|
||||
/// containing an [`RpcResponseError`] with `code` set to
|
||||
/// [`JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY`].
|
||||
///
|
||||
/// [`RpcResponseError`]: RpcError::RpcResponseError
|
||||
/// [`RpcError`]: crate::rpc_request::RpcError
|
||||
/// [`RpcResponseError`]: crate::rpc_request::RpcError::RpcResponseError
|
||||
/// [`JSON_RPC_SERVER_ERROR_TRANSACTION_SIGNATURE_VERIFICATION_FAILURE`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_TRANSACTION_SIGNATURE_VERIFICATION_FAILURE
|
||||
/// [`JSON_RPC_SERVER_ERROR_SEND_TRANSACTION_PREFLIGHT_FAILURE`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_SEND_TRANSACTION_PREFLIGHT_FAILURE
|
||||
/// [`JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY
|
||||
@@ -690,7 +768,8 @@ impl RpcClient {
|
||||
/// containing an [`RpcResponseError`] with `code` set to
|
||||
/// [`JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY`].
|
||||
///
|
||||
/// [`RpcResponseError`]: RpcError::RpcResponseError
|
||||
/// [`RpcError`]: crate::rpc_request::RpcError
|
||||
/// [`RpcResponseError`]: crate::rpc_request::RpcError::RpcResponseError
|
||||
/// [`JSON_RPC_SERVER_ERROR_TRANSACTION_SIGNATURE_VERIFICATION_FAILURE`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_TRANSACTION_SIGNATURE_VERIFICATION_FAILURE
|
||||
/// [`JSON_RPC_SERVER_ERROR_SEND_TRANSACTION_PREFLIGHT_FAILURE`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_SEND_TRANSACTION_PREFLIGHT_FAILURE
|
||||
/// [`JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY`]: crate::rpc_custom_error::JSON_RPC_SERVER_ERROR_NODE_UNHEALTHY
|
||||
@@ -2034,6 +2113,7 @@ impl RpcClient {
|
||||
/// transaction_details: Some(TransactionDetails::None),
|
||||
/// rewards: Some(true),
|
||||
/// commitment: None,
|
||||
/// max_supported_transaction_version: Some(0),
|
||||
/// };
|
||||
/// let block = rpc_client.get_block_with_config(
|
||||
/// slot,
|
||||
@@ -2101,7 +2181,7 @@ impl RpcClient {
|
||||
///
|
||||
/// This method uses the [`Finalized`] [commitment level][cl].
|
||||
///
|
||||
/// [`Finalized`]: CommitmentLevel::Finalized
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
/// [`get_blocks_with_limit`]: RpcClient::get_blocks_with_limit.
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
///
|
||||
@@ -2160,7 +2240,7 @@ impl RpcClient {
|
||||
/// This method returns an error if the given commitment level is below
|
||||
/// [`Confirmed`].
|
||||
///
|
||||
/// [`Confirmed`]: CommitmentLevel::Confirmed
|
||||
/// [`Confirmed`]: solana_sdk::commitment_config::CommitmentLevel::Confirmed
|
||||
///
|
||||
/// # RPC Reference
|
||||
///
|
||||
@@ -2253,7 +2333,7 @@ impl RpcClient {
|
||||
/// [`Confirmed`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Confirmed`]: CommitmentLevel::Confirmed
|
||||
/// [`Confirmed`]: solana_sdk::commitment_config::CommitmentLevel::Confirmed
|
||||
///
|
||||
/// # RPC Reference
|
||||
///
|
||||
@@ -2414,7 +2494,7 @@ impl RpcClient {
|
||||
/// [`Confirmed`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Confirmed`]: CommitmentLevel::Confirmed
|
||||
/// [`Confirmed`]: solana_sdk::commitment_config::CommitmentLevel::Confirmed
|
||||
///
|
||||
/// # RPC Reference
|
||||
///
|
||||
@@ -2504,7 +2584,7 @@ impl RpcClient {
|
||||
///
|
||||
/// This method uses the [`Finalized`] [commitment level][cl].
|
||||
///
|
||||
/// [`Finalized`]: CommitmentLevel::Finalized
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
///
|
||||
/// # RPC Reference
|
||||
@@ -2559,7 +2639,7 @@ impl RpcClient {
|
||||
/// [`Confirmed`].
|
||||
///
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
/// [`Confirmed`]: CommitmentLevel::Confirmed
|
||||
/// [`Confirmed`]: solana_sdk::commitment_config::CommitmentLevel::Confirmed
|
||||
///
|
||||
/// # RPC Reference
|
||||
///
|
||||
@@ -2596,6 +2676,7 @@ impl RpcClient {
|
||||
/// let config = RpcTransactionConfig {
|
||||
/// encoding: Some(UiTransactionEncoding::Json),
|
||||
/// commitment: Some(CommitmentConfig::confirmed()),
|
||||
/// max_supported_transaction_version: Some(0),
|
||||
/// };
|
||||
/// let transaction = rpc_client.get_transaction_with_config(
|
||||
/// &signature,
|
||||
@@ -2924,7 +3005,7 @@ impl RpcClient {
|
||||
///
|
||||
/// This method uses the [`Finalized`] [commitment level][cl].
|
||||
///
|
||||
/// [`Finalized`]: CommitmentLevel::Finalized
|
||||
/// [`Finalized`]: solana_sdk::commitment_config::CommitmentLevel::Finalized
|
||||
/// [cl]: https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment
|
||||
///
|
||||
/// # RPC Reference
|
||||
@@ -3082,6 +3163,7 @@ impl RpcClient {
|
||||
/// [`RpcError::ForUser`]. This is unlike [`get_account_with_commitment`],
|
||||
/// which returns `Ok(None)` if the account does not exist.
|
||||
///
|
||||
/// [`RpcError::ForUser`]: crate::rpc_request::RpcError::ForUser
|
||||
/// [`get_account_with_commitment`]: RpcClient::get_account_with_commitment
|
||||
///
|
||||
/// # RPC Reference
|
||||
|
@@ -197,6 +197,7 @@ pub struct RpcBlockSubscribeConfig {
    pub encoding: Option<UiTransactionEncoding>,
    pub transaction_details: Option<TransactionDetails>,
    pub show_rewards: Option<bool>,
    pub max_supported_transaction_version: Option<u8>,
}

#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
@@ -248,6 +249,7 @@ pub struct RpcBlockConfig {
    pub rewards: Option<bool>,
    #[serde(flatten)]
    pub commitment: Option<CommitmentConfig>,
    pub max_supported_transaction_version: Option<u8>,
}

impl EncodingConfig for RpcBlockConfig {
@@ -288,6 +290,7 @@ pub struct RpcTransactionConfig {
    pub encoding: Option<UiTransactionEncoding>,
    #[serde(flatten)]
    pub commitment: Option<CommitmentConfig>,
    pub max_supported_transaction_version: Option<u8>,
}

impl EncodingConfig for RpcTransactionConfig {
@@ -3,6 +3,7 @@ use {
    crate::rpc_response::RpcSimulateTransactionResult,
    jsonrpc_core::{Error, ErrorCode},
    solana_sdk::clock::Slot,
    solana_transaction_status::EncodeError,
    thiserror::Error,
};

@@ -59,7 +60,7 @@ pub enum RpcCustomError {
    #[error("BlockStatusNotAvailableYet")]
    BlockStatusNotAvailableYet { slot: Slot },
    #[error("UnsupportedTransactionVersion")]
    UnsupportedTransactionVersion,
    UnsupportedTransactionVersion(u8),
}

#[derive(Debug, Serialize, Deserialize)]
@@ -68,6 +69,16 @@ pub struct NodeUnhealthyErrorData {
    pub num_slots_behind: Option<Slot>,
}

impl From<EncodeError> for RpcCustomError {
    fn from(err: EncodeError) -> Self {
        match err {
            EncodeError::UnsupportedTransactionVersion(version) => {
                Self::UnsupportedTransactionVersion(version)
            }
        }
    }
}

impl From<RpcCustomError> for Error {
    fn from(e: RpcCustomError) -> Self {
        match e {
@@ -172,9 +183,9 @@ impl From<RpcCustomError> for Error {
                message: format!("Block status not yet available for slot {}", slot),
                data: None,
            },
            RpcCustomError::UnsupportedTransactionVersion => Self {
            RpcCustomError::UnsupportedTransactionVersion(version) => Self {
                code: ErrorCode::ServerError(JSON_RPC_SERVER_ERROR_UNSUPPORTED_TRANSACTION_VERSION),
                message: "Versioned transactions are not supported".to_string(),
                message: format!("Transaction version ({}) is not supported", version),
                data: None,
            },
        }
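The net effect of the two `From` impls above is that an `EncodeError` carrying a transaction version flows through `RpcCustomError` into a JSON-RPC error whose message names the offending version. A minimal sketch of that composition, assuming it is written inside the same module as the impls above; the function name and variable are illustrative only:

fn reject_unsupported_version(version: u8) -> jsonrpc_core::Error {
    // EncodeError -> RpcCustomError keeps the version in the new tuple variant...
    let custom: RpcCustomError = EncodeError::UnsupportedTransactionVersion(version).into();
    // ...and RpcCustomError -> Error formats "Transaction version ({}) is not supported".
    jsonrpc_core::Error::from(custom)
}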
@@ -71,6 +71,7 @@ impl From<RpcConfirmedBlockConfig> for RpcBlockConfig {
            transaction_details: config.transaction_details,
            rewards: config.rewards,
            commitment: config.commitment,
            max_supported_transaction_version: None,
        }
    }
}
@@ -98,6 +99,7 @@ impl From<RpcConfirmedTransactionConfig> for RpcTransactionConfig {
        Self {
            encoding: config.encoding,
            commitment: config.commitment,
            max_supported_transaction_version: None,
        }
    }
}
@@ -117,7 +117,7 @@ pub struct RpcInflationRate {
    pub epoch: Epoch,
}

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct RpcKeyedAccount {
    pub pubkey: String,
@@ -246,7 +246,7 @@ pub struct RpcBlockProductionRange {
    pub last_slot: Slot,
}

#[derive(Serialize, Deserialize, Clone)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct RpcBlockProduction {
    /// Map of leader base58 identity pubkeys to a tuple of `(number of leader slots, number of blocks produced)`
@@ -363,7 +363,7 @@ pub struct RpcAccountBalance {
    pub lamports: u64,
}

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct RpcSupply {
    pub total: u64,
@@ -432,8 +432,8 @@ pub enum RpcBlockUpdateError {
    #[error("block store error")]
    BlockStoreError,

    #[error("unsupported transaction version")]
    UnsupportedTransactionVersion,
    #[error("unsupported transaction version ({0})")]
    UnsupportedTransactionVersion(u8),
}

#[derive(Serialize, Deserialize, Debug)]
@@ -23,13 +23,9 @@ pub struct RpcTransportStats {
/// `RpcSender` implements the underlying transport of requests to, and
/// responses from, a Solana node, and is used primarily by [`RpcClient`].
///
/// It is typically implemented by [`HttpSender`] in production, and
/// [`MockSender`] in unit tests.
///
/// [`HttpSender`]: crate::http_sender::HttpSender
/// [`MockSender`]: crate::mock_sender::MockSender
/// [`RpcClient`]: crate::rpc_client::RpcClient
#[async_trait]
pub(crate) trait RpcSender {
pub trait RpcSender {
    async fn send(
        &self,
        request: RpcRequest,
@@ -4,8 +4,10 @@
|
||||
//! unstable and may change in future releases.
|
||||
|
||||
use {
|
||||
crate::{rpc_client::RpcClient, rpc_config::RpcProgramAccountsConfig, rpc_response::Response},
|
||||
bincode::{serialize_into, serialized_size},
|
||||
crate::{
|
||||
rpc_client::RpcClient, rpc_config::RpcProgramAccountsConfig, rpc_response::Response,
|
||||
tpu_connection::TpuConnection, udp_client::UdpTpuConnection,
|
||||
},
|
||||
log::*,
|
||||
solana_sdk::{
|
||||
account::Account,
|
||||
@@ -17,7 +19,6 @@ use {
|
||||
hash::Hash,
|
||||
instruction::Instruction,
|
||||
message::Message,
|
||||
packet::PACKET_DATA_SIZE,
|
||||
pubkey::Pubkey,
|
||||
signature::{Keypair, Signature, Signer},
|
||||
signers::Signers,
|
||||
@@ -117,22 +118,20 @@ impl ClientOptimizer {
|
||||
}
|
||||
|
||||
/// An object for querying and sending transactions to the network.
|
||||
pub struct ThinClient {
|
||||
transactions_socket: UdpSocket,
|
||||
tpu_addrs: Vec<SocketAddr>,
|
||||
pub struct ThinClient<C: 'static + TpuConnection> {
|
||||
rpc_clients: Vec<RpcClient>,
|
||||
tpu_connections: Vec<C>,
|
||||
optimizer: ClientOptimizer,
|
||||
}
|
||||
|
||||
impl ThinClient {
|
||||
impl<C: 'static + TpuConnection> ThinClient<C> {
|
||||
/// Create a new ThinClient that will interface with the Rpc at `rpc_addr` using TCP
|
||||
/// and the Tpu at `tpu_addr` over `transactions_socket` using UDP.
|
||||
/// and the Tpu at `tpu_addr` over `transactions_socket` using Quic or UDP
|
||||
/// (currently hardcoded to UDP)
|
||||
pub fn new(rpc_addr: SocketAddr, tpu_addr: SocketAddr, transactions_socket: UdpSocket) -> Self {
|
||||
Self::new_from_client(
|
||||
tpu_addr,
|
||||
transactions_socket,
|
||||
RpcClient::new_socket(rpc_addr),
|
||||
)
|
||||
let tpu_connection = C::new(transactions_socket, tpu_addr);
|
||||
|
||||
Self::new_from_client(RpcClient::new_socket(rpc_addr), tpu_connection)
|
||||
}
|
||||
|
||||
pub fn new_socket_with_timeout(
|
||||
@@ -142,18 +141,14 @@ impl ThinClient {
|
||||
timeout: Duration,
|
||||
) -> Self {
|
||||
let rpc_client = RpcClient::new_socket_with_timeout(rpc_addr, timeout);
|
||||
Self::new_from_client(tpu_addr, transactions_socket, rpc_client)
|
||||
let tpu_connection = C::new(transactions_socket, tpu_addr);
|
||||
Self::new_from_client(rpc_client, tpu_connection)
|
||||
}
|
||||
|
||||
fn new_from_client(
|
||||
tpu_addr: SocketAddr,
|
||||
transactions_socket: UdpSocket,
|
||||
rpc_client: RpcClient,
|
||||
) -> Self {
|
||||
fn new_from_client(rpc_client: RpcClient, tpu_connection: C) -> Self {
|
||||
Self {
|
||||
transactions_socket,
|
||||
tpu_addrs: vec![tpu_addr],
|
||||
rpc_clients: vec![rpc_client],
|
||||
tpu_connections: vec![tpu_connection],
|
||||
optimizer: ClientOptimizer::new(0),
|
||||
}
|
||||
}
|
||||
@@ -168,16 +163,19 @@ impl ThinClient {
|
||||
|
||||
let rpc_clients: Vec<_> = rpc_addrs.into_iter().map(RpcClient::new_socket).collect();
|
||||
let optimizer = ClientOptimizer::new(rpc_clients.len());
|
||||
let tpu_connections: Vec<_> = tpu_addrs
|
||||
.into_iter()
|
||||
.map(|tpu_addr| C::new(transactions_socket.try_clone().unwrap(), tpu_addr))
|
||||
.collect();
|
||||
Self {
|
||||
transactions_socket,
|
||||
tpu_addrs,
|
||||
rpc_clients,
|
||||
tpu_connections,
|
||||
optimizer,
|
||||
}
|
||||
}
|
||||
|
||||
fn tpu_addr(&self) -> &SocketAddr {
|
||||
&self.tpu_addrs[self.optimizer.best()]
|
||||
fn tpu_connection(&self) -> &C {
|
||||
&self.tpu_connections[self.optimizer.best()]
|
||||
}
|
||||
|
||||
fn rpc_client(&self) -> &RpcClient {
|
||||
@@ -205,7 +203,6 @@ impl ThinClient {
|
||||
self.send_and_confirm_transaction(&[keypair], transaction, tries, 0)
|
||||
}
|
||||
|
||||
/// Retry sending a signed Transaction to the server for processing
|
||||
pub fn send_and_confirm_transaction<T: Signers>(
|
||||
&self,
|
||||
keypairs: &T,
|
||||
@@ -215,18 +212,13 @@ impl ThinClient {
|
||||
) -> TransportResult<Signature> {
|
||||
for x in 0..tries {
|
||||
let now = Instant::now();
|
||||
let mut buf = vec![0; serialized_size(&transaction).unwrap() as usize];
|
||||
let mut wr = std::io::Cursor::new(&mut buf[..]);
|
||||
let mut num_confirmed = 0;
|
||||
let mut wait_time = MAX_PROCESSING_AGE;
|
||||
serialize_into(&mut wr, &transaction)
|
||||
.expect("serialize Transaction in pub fn transfer_signed");
|
||||
// resend the same transaction until the transaction has no chance of succeeding
|
||||
while now.elapsed().as_secs() < wait_time as u64 {
|
||||
if num_confirmed == 0 {
|
||||
// Send the transaction if there has been no confirmation (e.g. the first time)
|
||||
self.transactions_socket
|
||||
.send_to(&buf[..], &self.tpu_addr())?;
|
||||
self.tpu_connection().send_transaction(transaction)?;
|
||||
}
|
||||
|
||||
if let Ok(confirmed_blocks) = self.poll_for_signature_confirmation(
|
||||
@@ -321,13 +313,13 @@ impl ThinClient {
|
||||
}
|
||||
}
|
||||
|
||||
impl Client for ThinClient {
|
||||
impl<C: 'static + TpuConnection> Client for ThinClient<C> {
|
||||
fn tpu_addr(&self) -> String {
|
||||
self.tpu_addr().to_string()
|
||||
self.tpu_connection().tpu_addr().to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl SyncClient for ThinClient {
|
||||
impl<C: 'static + TpuConnection> SyncClient for ThinClient<C> {
|
||||
fn send_and_confirm_message<T: Signers>(
|
||||
&self,
|
||||
keypairs: &T,
|
||||
@@ -607,17 +599,16 @@ impl SyncClient for ThinClient {
|
||||
}
|
||||
}
|
||||
|
||||
impl AsyncClient for ThinClient {
|
||||
impl<C: 'static + TpuConnection> AsyncClient for ThinClient<C> {
|
||||
fn async_send_transaction(&self, transaction: Transaction) -> TransportResult<Signature> {
|
||||
let mut buf = vec![0; serialized_size(&transaction).unwrap() as usize];
|
||||
let mut wr = std::io::Cursor::new(&mut buf[..]);
|
||||
serialize_into(&mut wr, &transaction)
|
||||
.expect("serialize Transaction in pub fn transfer_signed");
|
||||
assert!(buf.len() < PACKET_DATA_SIZE);
|
||||
self.transactions_socket
|
||||
.send_to(&buf[..], &self.tpu_addr())?;
|
||||
self.tpu_connection().send_transaction(&transaction)?;
|
||||
Ok(transaction.signatures[0])
|
||||
}
|
||||
|
||||
fn async_send_batch(&self, transactions: Vec<Transaction>) -> TransportResult<()> {
|
||||
self.tpu_connection().send_batch(transactions)
|
||||
}
|
||||
|
||||
fn async_send_message<T: Signers>(
|
||||
&self,
|
||||
keypairs: &T,
|
||||
@@ -649,20 +640,23 @@ impl AsyncClient for ThinClient {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_client((rpc, tpu): (SocketAddr, SocketAddr), range: (u16, u16)) -> ThinClient {
|
||||
pub fn create_client(
|
||||
(rpc, tpu): (SocketAddr, SocketAddr),
|
||||
range: (u16, u16),
|
||||
) -> ThinClient<UdpTpuConnection> {
|
||||
let (_, transactions_socket) =
|
||||
solana_net_utils::bind_in_range(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), range).unwrap();
|
||||
ThinClient::new(rpc, tpu, transactions_socket)
|
||||
ThinClient::<UdpTpuConnection>::new(rpc, tpu, transactions_socket)
|
||||
}
|
||||
|
||||
pub fn create_client_with_timeout(
|
||||
(rpc, tpu): (SocketAddr, SocketAddr),
|
||||
range: (u16, u16),
|
||||
timeout: Duration,
|
||||
) -> ThinClient {
|
||||
) -> ThinClient<UdpTpuConnection> {
|
||||
let (_, transactions_socket) =
|
||||
solana_net_utils::bind_in_range(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), range).unwrap();
|
||||
ThinClient::new_socket_with_timeout(rpc, tpu, transactions_socket, timeout)
|
||||
ThinClient::<UdpTpuConnection>::new_socket_with_timeout(rpc, tpu, transactions_socket, timeout)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
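With the refactor above, the TPU transport is now a type parameter on ThinClient: the convenience constructors pin it to `UdpTpuConnection`, so existing call sites keep working unchanged. A rough usage sketch, assuming the module paths implied by the file locations in this diff; the port range below is a placeholder, not a value from the change:

use {
    solana_client::{
        thin_client::{create_client, ThinClient},
        udp_client::UdpTpuConnection,
    },
    std::net::SocketAddr,
};

fn connect(rpc_addr: SocketAddr, tpu_addr: SocketAddr) -> ThinClient<UdpTpuConnection> {
    // Binds a UDP socket in the given port range and wraps it in a UdpTpuConnection internally.
    create_client((rpc_addr, tpu_addr), (8000, 10_000))
}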
19
client/src/tpu_connection.rs
Normal file
@@ -0,0 +1,19 @@
use {
    solana_sdk::{transaction::Transaction, transport::Result as TransportResult},
    std::net::{SocketAddr, UdpSocket},
};

pub trait TpuConnection {
    fn new(client_socket: UdpSocket, tpu_addr: SocketAddr) -> Self;

    fn tpu_addr(&self) -> &SocketAddr;

    fn send_transaction(&self, tx: &Transaction) -> TransportResult<()> {
        let data = bincode::serialize(tx).expect("serialize Transaction in send_transaction");
        self.send_wire_transaction(data)
    }

    fn send_wire_transaction(&self, data: Vec<u8>) -> TransportResult<()>;

    fn send_batch(&self, transactions: Vec<Transaction>) -> TransportResult<()>;
}
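The trait above is the seam that lets ThinClient swap its TPU transport (UDP today, potentially Quic later): only `new`, `tpu_addr`, `send_wire_transaction`, and `send_batch` must be provided, while `send_transaction` comes for free from the default method. As a purely hypothetical illustration (not part of this change), an alternative implementation that only counts how many transactions it was asked to send could look like this, written as if inside the same client crate:

use {
    crate::tpu_connection::TpuConnection,
    solana_sdk::{transaction::Transaction, transport::Result as TransportResult},
    std::{
        net::{SocketAddr, UdpSocket},
        sync::atomic::{AtomicUsize, Ordering},
    },
};

pub struct CountingTpuConnection {
    addr: SocketAddr,
    sent: AtomicUsize,
}

impl TpuConnection for CountingTpuConnection {
    // The socket is required by the trait signature but unused in this toy transport.
    fn new(_client_socket: UdpSocket, tpu_addr: SocketAddr) -> Self {
        Self {
            addr: tpu_addr,
            sent: AtomicUsize::new(0),
        }
    }

    fn tpu_addr(&self) -> &SocketAddr {
        &self.addr
    }

    fn send_wire_transaction(&self, _data: Vec<u8>) -> TransportResult<()> {
        self.sent.fetch_add(1, Ordering::Relaxed);
        Ok(())
    }

    fn send_batch(&self, transactions: Vec<Transaction>) -> TransportResult<()> {
        self.sent.fetch_add(transactions.len(), Ordering::Relaxed);
        Ok(())
    }
}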
42
client/src/udp_client.rs
Normal file
@@ -0,0 +1,42 @@
//! Simple TPU client that communicates with the given UDP port over UDP and provides
//! an interface for sending transactions

use {
    crate::tpu_connection::TpuConnection,
    solana_sdk::{transaction::Transaction, transport::Result as TransportResult},
    std::net::{SocketAddr, UdpSocket},
};

pub struct UdpTpuConnection {
    socket: UdpSocket,
    addr: SocketAddr,
}

impl TpuConnection for UdpTpuConnection {
    fn new(client_socket: UdpSocket, tpu_addr: SocketAddr) -> Self {
        Self {
            socket: client_socket,
            addr: tpu_addr,
        }
    }

    fn tpu_addr(&self) -> &SocketAddr {
        &self.addr
    }

    fn send_wire_transaction(&self, data: Vec<u8>) -> TransportResult<()> {
        self.socket.send_to(&data[..], self.addr)?;
        Ok(())
    }

    fn send_batch(&self, transactions: Vec<Transaction>) -> TransportResult<()> {
        transactions
            .into_iter()
            .map(|tx| bincode::serialize(&tx).expect("serialize Transaction in send_batch"))
            .try_for_each(|buff| -> TransportResult<()> {
                self.socket.send_to(&buff[..], self.addr)?;
                Ok(())
            })?;
        Ok(())
    }
}
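Putting the two new files together, sending a single transaction over the UDP transport amounts to binding a socket, wrapping it, and calling the trait. A sketch written as if from inside the client crate like the file above; the bind address, lamport amount, and blockhash are placeholders:

use {
    crate::{tpu_connection::TpuConnection, udp_client::UdpTpuConnection},
    solana_sdk::{
        hash::Hash, signature::Keypair, system_transaction, transport::Result as TransportResult,
    },
    std::net::{SocketAddr, UdpSocket},
};

fn send_one_transfer(tpu_addr: SocketAddr) -> TransportResult<()> {
    let socket = UdpSocket::bind("0.0.0.0:0").expect("bind local UDP socket");
    let connection = UdpTpuConnection::new(socket, tpu_addr);
    // A real client would fetch a recent blockhash over RPC; Hash::new_unique() is a stand-in.
    let tx = system_transaction::transfer(
        &Keypair::new(),
        &solana_sdk::pubkey::new_rand(),
        1,
        Hash::new_unique(),
    );
    // Uses the trait's default send_transaction, which serializes with bincode
    // and forwards to send_wire_transaction above.
    connection.send_transaction(&tx)
}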
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "solana-core"
|
||||
description = "Blockchain, Rebuilt for Scale"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
homepage = "https://solana.com/"
|
||||
documentation = "https://docs.rs/solana-core"
|
||||
readme = "../README.md"
|
||||
@@ -15,73 +15,69 @@ codecov = { repository = "solana-labs/solana", branch = "master", service = "git
|
||||
|
||||
[dependencies]
|
||||
ahash = "0.7.6"
|
||||
base64 = "0.12.3"
|
||||
base64 = "0.13.0"
|
||||
bincode = "1.3.3"
|
||||
bs58 = "0.4.0"
|
||||
chrono = { version = "0.4.11", features = ["serde"] }
|
||||
crossbeam-channel = "0.5"
|
||||
dashmap = { version = "4.0.2", features = ["rayon", "raw-api"] }
|
||||
etcd-client = { version = "0.8.3", features = ["tls"]}
|
||||
etcd-client = { version = "0.8.4", features = ["tls"] }
|
||||
fs_extra = "1.2.0"
|
||||
histogram = "0.6.9"
|
||||
itertools = "0.10.3"
|
||||
log = "0.4.14"
|
||||
lru = "0.7.2"
|
||||
lru = "0.7.3"
|
||||
rand = "0.7.0"
|
||||
rand_chacha = "0.2.2"
|
||||
rayon = "1.5.1"
|
||||
retain_mut = "0.1.5"
|
||||
retain_mut = "0.1.7"
|
||||
serde = "1.0.136"
|
||||
serde_derive = "1.0.103"
|
||||
solana-address-lookup-table-program = { path = "../programs/address-lookup-table", version = "=1.10.0" }
|
||||
solana-bloom = { path = "../bloom", version = "=1.10.0" }
|
||||
solana-accountsdb-plugin-manager = { path = "../accountsdb-plugin-manager", version = "=1.10.0" }
|
||||
solana-client = { path = "../client", version = "=1.10.0" }
|
||||
solana-entry = { path = "../entry", version = "=1.10.0" }
|
||||
solana-gossip = { path = "../gossip", version = "=1.10.0" }
|
||||
solana-ledger = { path = "../ledger", version = "=1.10.0" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.0" }
|
||||
solana-metrics = { path = "../metrics", version = "=1.10.0" }
|
||||
solana-net-utils = { path = "../net-utils", version = "=1.10.0" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.0" }
|
||||
solana-poh = { path = "../poh", version = "=1.10.0" }
|
||||
solana-program-runtime = { path = "../program-runtime", version = "=1.10.0" }
|
||||
solana-rpc = { path = "../rpc", version = "=1.10.0" }
|
||||
solana-replica-lib = { path = "../replica-lib", version = "=1.10.0" }
|
||||
solana-runtime = { path = "../runtime", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-frozen-abi = { path = "../frozen-abi", version = "=1.10.0" }
|
||||
solana-frozen-abi-macro = { path = "../frozen-abi/macro", version = "=1.10.0" }
|
||||
solana-send-transaction-service = { path = "../send-transaction-service", version = "=1.10.0" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.0" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.0" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.0" }
|
||||
solana-accountsdb-plugin-manager = { path = "../accountsdb-plugin-manager", version = "=1.10.2" }
|
||||
solana-address-lookup-table-program = { path = "../programs/address-lookup-table", version = "=1.10.2" }
|
||||
solana-bloom = { path = "../bloom", version = "=1.10.2" }
|
||||
solana-client = { path = "../client", version = "=1.10.2" }
|
||||
solana-entry = { path = "../entry", version = "=1.10.2" }
|
||||
solana-frozen-abi = { path = "../frozen-abi", version = "=1.10.2" }
|
||||
solana-frozen-abi-macro = { path = "../frozen-abi/macro", version = "=1.10.2" }
|
||||
solana-gossip = { path = "../gossip", version = "=1.10.2" }
|
||||
solana-ledger = { path = "../ledger", version = "=1.10.2" }
|
||||
solana-measure = { path = "../measure", version = "=1.10.2" }
|
||||
solana-metrics = { path = "../metrics", version = "=1.10.2" }
|
||||
solana-net-utils = { path = "../net-utils", version = "=1.10.2" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.2" }
|
||||
solana-poh = { path = "../poh", version = "=1.10.2" }
|
||||
solana-program-runtime = { path = "../program-runtime", version = "=1.10.2" }
|
||||
solana-rayon-threadlimit = { path = "../rayon-threadlimit", version = "=1.10.2" }
|
||||
solana-replica-lib = { path = "../replica-lib", version = "=1.10.2" }
|
||||
solana-rpc = { path = "../rpc", version = "=1.10.2" }
|
||||
solana-runtime = { path = "../runtime", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
solana-send-transaction-service = { path = "../send-transaction-service", version = "=1.10.2" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.2" }
|
||||
solana-transaction-status = { path = "../transaction-status", version = "=1.10.2" }
|
||||
solana-vote-program = { path = "../programs/vote", version = "=1.10.2" }
|
||||
sys-info = "0.9.1"
|
||||
tempfile = "3.3.0"
|
||||
thiserror = "1.0"
|
||||
solana-rayon-threadlimit = { path = "../rayon-threadlimit", version = "=1.10.0" }
|
||||
sys-info = "0.9.1"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
trees = "0.4.2"
|
||||
|
||||
[dev-dependencies]
|
||||
jsonrpc-core = "18.0.0"
|
||||
jsonrpc-core-client = { version = "18.0.0", features = ["ipc", "ws"] }
|
||||
jsonrpc-derive = "18.0.0"
|
||||
jsonrpc-pubsub = "18.0.0"
|
||||
matches = "0.1.9"
|
||||
raptorq = "1.6.5"
|
||||
reqwest = { version = "0.11.9", default-features = false, features = ["blocking", "rustls-tls", "json"] }
|
||||
serde_json = "1.0.78"
|
||||
serial_test = "0.5.1"
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-program-runtime = { path = "../program-runtime", version = "=1.10.0" }
|
||||
solana-stake-program = { path = "../programs/stake", version = "=1.10.0" }
|
||||
solana-version = { path = "../version", version = "=1.10.0" }
|
||||
serde_json = "1.0.79"
|
||||
serial_test = "0.6.0"
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
solana-program-runtime = { path = "../program-runtime", version = "=1.10.2" }
|
||||
solana-stake-program = { path = "../programs/stake", version = "=1.10.2" }
|
||||
solana-version = { path = "../version", version = "=1.10.2" }
|
||||
static_assertions = "1.1.0"
|
||||
systemstat = "0.1.10"
|
||||
|
||||
[target."cfg(unix)".dependencies]
|
||||
sysctl = "0.4.3"
|
||||
sysctl = "0.4.4"
|
||||
|
||||
[build-dependencies]
|
||||
rustc_version = "0.4"
|
||||
|
@@ -12,6 +12,7 @@ use {
|
||||
banking_stage::{BankingStage, BankingStageStats},
|
||||
leader_slot_banking_stage_metrics::LeaderSlotMetricsTracker,
|
||||
qos_service::QosService,
|
||||
unprocessed_packet_batches::*,
|
||||
},
|
||||
solana_entry::entry::{next_hash, Entry},
|
||||
solana_gossip::cluster_info::{ClusterInfo, Node},
|
||||
@@ -82,7 +83,11 @@ fn bench_consume_buffered(bencher: &mut Bencher) {
|
||||
let mut packet_batches = VecDeque::new();
|
||||
for batch in batches {
|
||||
let batch_len = batch.packets.len();
|
||||
packet_batches.push_back((batch, vec![0usize; batch_len], false));
|
||||
packet_batches.push_back(DeserializedPacketBatch::new(
|
||||
batch,
|
||||
vec![0usize; batch_len],
|
||||
false,
|
||||
));
|
||||
}
|
||||
let (s, _r) = unbounded();
|
||||
// This tests the performance of buffering packets.
|
||||
|
112
core/benches/cluster_nodes.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
#![feature(test)]
|
||||
|
||||
extern crate test;
|
||||
|
||||
use {
|
||||
rand::{seq::SliceRandom, Rng},
|
||||
solana_core::{
|
||||
cluster_nodes::{make_test_cluster, new_cluster_nodes, ClusterNodes},
|
||||
retransmit_stage::RetransmitStage,
|
||||
},
|
||||
solana_gossip::contact_info::ContactInfo,
|
||||
solana_sdk::{clock::Slot, hash::hashv, pubkey::Pubkey, signature::Signature},
|
||||
test::Bencher,
|
||||
};
|
||||
|
||||
const NUM_SIMULATED_SHREDS: usize = 4;
|
||||
|
||||
fn make_cluster_nodes<R: Rng>(
|
||||
rng: &mut R,
|
||||
unstaked_ratio: Option<(u32, u32)>,
|
||||
) -> (Vec<ContactInfo>, ClusterNodes<RetransmitStage>) {
|
||||
let (nodes, stakes, cluster_info) = make_test_cluster(rng, 5_000, unstaked_ratio);
|
||||
let cluster_nodes = new_cluster_nodes::<RetransmitStage>(&cluster_info, &stakes);
|
||||
(nodes, cluster_nodes)
|
||||
}
|
||||
|
||||
fn get_retransmit_peers_deterministic(
|
||||
cluster_nodes: &ClusterNodes<RetransmitStage>,
|
||||
slot: &Slot,
|
||||
slot_leader: &Pubkey,
|
||||
num_simulated_shreds: usize,
|
||||
) {
|
||||
for i in 0..num_simulated_shreds {
|
||||
// see Shred::seed
|
||||
let shred_seed = hashv(&[
|
||||
&slot.to_le_bytes(),
|
||||
&(i as u32).to_le_bytes(),
|
||||
&slot_leader.to_bytes(),
|
||||
])
|
||||
.to_bytes();
|
||||
|
||||
let (_neighbors, _children) = cluster_nodes.get_retransmit_peers_deterministic(
|
||||
shred_seed,
|
||||
solana_gossip::cluster_info::DATA_PLANE_FANOUT,
|
||||
*slot_leader,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn get_retransmit_peers_compat(
|
||||
cluster_nodes: &ClusterNodes<RetransmitStage>,
|
||||
slot_leader: &Pubkey,
|
||||
signatures: &[Signature],
|
||||
) {
|
||||
for signature in signatures.iter() {
|
||||
// see Shred::seed
|
||||
let signature = signature.as_ref();
|
||||
let offset = signature.len().checked_sub(32).unwrap();
|
||||
let shred_seed = signature[offset..].try_into().unwrap();
|
||||
|
||||
let (_neighbors, _children) = cluster_nodes.get_retransmit_peers_compat(
|
||||
shred_seed,
|
||||
solana_gossip::cluster_info::DATA_PLANE_FANOUT,
|
||||
*slot_leader,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn get_retransmit_peers_deterministic_wrapper(b: &mut Bencher, unstaked_ratio: Option<(u32, u32)>) {
|
||||
let mut rng = rand::thread_rng();
|
||||
let (nodes, cluster_nodes) = make_cluster_nodes(&mut rng, unstaked_ratio);
|
||||
let slot_leader = nodes[1..].choose(&mut rng).unwrap().id;
|
||||
let slot = rand::random::<u64>();
|
||||
b.iter(|| {
|
||||
get_retransmit_peers_deterministic(
|
||||
&cluster_nodes,
|
||||
&slot,
|
||||
&slot_leader,
|
||||
NUM_SIMULATED_SHREDS,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
fn get_retransmit_peers_compat_wrapper(b: &mut Bencher, unstaked_ratio: Option<(u32, u32)>) {
|
||||
let mut rng = rand::thread_rng();
|
||||
let (nodes, cluster_nodes) = make_cluster_nodes(&mut rng, unstaked_ratio);
|
||||
let slot_leader = nodes[1..].choose(&mut rng).unwrap().id;
|
||||
let signatures: Vec<_> = std::iter::repeat_with(Signature::new_unique)
|
||||
.take(NUM_SIMULATED_SHREDS)
|
||||
.collect();
|
||||
b.iter(|| get_retransmit_peers_compat(&cluster_nodes, &slot_leader, &signatures));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_get_retransmit_peers_deterministic_unstaked_ratio_1_2(b: &mut Bencher) {
|
||||
get_retransmit_peers_deterministic_wrapper(b, Some((1, 2)));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_get_retransmit_peers_compat_unstaked_ratio_1_2(b: &mut Bencher) {
|
||||
get_retransmit_peers_compat_wrapper(b, Some((1, 2)));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_get_retransmit_peers_deterministic_unstaked_ratio_1_32(b: &mut Bencher) {
|
||||
get_retransmit_peers_deterministic_wrapper(b, Some((1, 32)));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_get_retransmit_peers_compat_unstaked_ratio_1_32(b: &mut Bencher) {
|
||||
get_retransmit_peers_compat_wrapper(b, Some((1, 32)));
|
||||
}
|
@@ -3,6 +3,7 @@ use {
|
||||
cluster_slots::ClusterSlots,
|
||||
duplicate_repair_status::{DeadSlotAncestorRequestStatus, DuplicateAncestorDecision},
|
||||
outstanding_requests::OutstandingRequests,
|
||||
packet_threshold::DynamicPacketToProcessThreshold,
|
||||
repair_response::{self},
|
||||
repair_service::{DuplicateSlotsResetSender, RepairInfo, RepairStatsGroup},
|
||||
replay_stage::DUPLICATE_THRESHOLD,
|
||||
@@ -12,7 +13,6 @@ use {
|
||||
crossbeam_channel::{unbounded, Receiver, Sender},
|
||||
dashmap::{mapref::entry::Entry::Occupied, DashMap},
|
||||
solana_ledger::{blockstore::Blockstore, shred::SIZE_OF_NONCE},
|
||||
solana_measure::measure::Measure,
|
||||
solana_perf::{
|
||||
packet::{limited_deserialize, Packet, PacketBatch},
|
||||
recycler::Recycler,
|
||||
@@ -208,7 +208,7 @@ impl AncestorHashesService {
|
||||
.spawn(move || {
|
||||
let mut last_stats_report = Instant::now();
|
||||
let mut stats = AncestorHashesResponsesStats::default();
|
||||
let mut max_packets = 1024;
|
||||
let mut packet_threshold = DynamicPacketToProcessThreshold::default();
|
||||
loop {
|
||||
let result = Self::process_new_packets_from_channel(
|
||||
&ancestor_hashes_request_statuses,
|
||||
@@ -216,13 +216,13 @@ impl AncestorHashesService {
|
||||
&blockstore,
|
||||
&outstanding_requests,
|
||||
&mut stats,
|
||||
&mut max_packets,
|
||||
&mut packet_threshold,
|
||||
&duplicate_slots_reset_sender,
|
||||
&retryable_slots_sender,
|
||||
);
|
||||
match result {
|
||||
Err(Error::RecvTimeout(_)) | Ok(_) => {}
|
||||
Err(err) => info!("ancestors hashes reponses listener error: {:?}", err),
|
||||
Err(err) => info!("ancestors hashes responses listener error: {:?}", err),
|
||||
};
|
||||
if exit.load(Ordering::Relaxed) {
|
||||
return;
|
||||
@@ -243,7 +243,7 @@ impl AncestorHashesService {
|
||||
blockstore: &Blockstore,
|
||||
outstanding_requests: &RwLock<OutstandingAncestorHashesRepairs>,
|
||||
stats: &mut AncestorHashesResponsesStats,
|
||||
max_packets: &mut usize,
|
||||
packet_threshold: &mut DynamicPacketToProcessThreshold,
|
||||
duplicate_slots_reset_sender: &DuplicateSlotsResetSender,
|
||||
retryable_slots_sender: &RetryableSlotsSender,
|
||||
) -> Result<()> {
|
||||
@@ -254,18 +254,17 @@ impl AncestorHashesService {
|
||||
let mut dropped_packets = 0;
|
||||
while let Ok(batch) = response_receiver.try_recv() {
|
||||
total_packets += batch.packets.len();
|
||||
if total_packets < *max_packets {
|
||||
// Drop the rest in the channel in case of DOS
|
||||
packet_batches.push(batch);
|
||||
} else {
|
||||
if packet_threshold.should_drop(total_packets) {
|
||||
dropped_packets += batch.packets.len();
|
||||
} else {
|
||||
packet_batches.push(batch);
|
||||
}
|
||||
}
|
||||
|
||||
stats.dropped_packets += dropped_packets;
|
||||
stats.total_packets += total_packets;
|
||||
|
||||
let mut time = Measure::start("ancestor_hashes::handle_packets");
|
||||
let timer = Instant::now();
|
||||
for packet_batch in packet_batches {
|
||||
Self::process_packet_batch(
|
||||
ancestor_hashes_request_statuses,
|
||||
@@ -277,14 +276,7 @@ impl AncestorHashesService {
|
||||
retryable_slots_sender,
|
||||
);
|
||||
}
|
||||
time.stop();
|
||||
if total_packets >= *max_packets {
|
||||
if time.as_ms() > 1000 {
|
||||
*max_packets = (*max_packets * 9) / 10;
|
||||
} else {
|
||||
*max_packets = (*max_packets * 10) / 9;
|
||||
}
|
||||
}
|
||||
packet_threshold.update(total_packets, timer.elapsed());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
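The hunks above swap the hand-tuned `max_packets` logic (shrink the budget by 9/10 when a processing pass takes more than a second, otherwise grow it by 10/9) for a `DynamicPacketToProcessThreshold` type. Its definition is not part of this excerpt, so the following is only an illustrative stand-in that reproduces the replaced behavior; the constants, field names, and method bodies are assumptions, not the actual implementation:

use std::time::Duration;

struct PacketThresholdSketch {
    max_packets: usize,
}

impl Default for PacketThresholdSketch {
    fn default() -> Self {
        // Matches the old hardcoded starting budget of 1024 packets.
        Self { max_packets: 1024 }
    }
}

impl PacketThresholdSketch {
    /// Drop incoming batches once the running total reaches the current budget.
    fn should_drop(&self, total_packets: usize) -> bool {
        total_packets >= self.max_packets
    }

    /// Tighten the budget after a slow pass, relax it after a fast one.
    fn update(&mut self, total_packets: usize, elapsed: Duration) {
        if total_packets >= self.max_packets {
            if elapsed > Duration::from_millis(1000) {
                self.max_packets = self.max_packets * 9 / 10;
            } else {
                self.max_packets = self.max_packets * 10 / 9;
            }
        }
    }
}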
@@ -8,6 +8,7 @@ use {
|
||||
LeaderExecuteAndCommitTimings, RecordTransactionsTimings,
|
||||
},
|
||||
qos_service::QosService,
|
||||
unprocessed_packet_batches::*,
|
||||
},
|
||||
crossbeam_channel::{Receiver as CrossbeamReceiver, RecvTimeoutError},
|
||||
histogram::Histogram,
|
||||
@@ -45,8 +46,7 @@ use {
|
||||
feature_set,
|
||||
message::Message,
|
||||
pubkey::Pubkey,
|
||||
short_vec::decode_shortu16_len,
|
||||
signature::Signature,
|
||||
saturating_add_assign,
|
||||
timing::{duration_as_ms, timestamp, AtomicInterval},
|
||||
transaction::{
|
||||
self, AddressLoader, SanitizedTransaction, TransactionError, VersionedTransaction,
|
||||
@@ -60,7 +60,6 @@ use {
|
||||
cmp,
|
||||
collections::{HashMap, VecDeque},
|
||||
env,
|
||||
mem::size_of,
|
||||
net::{SocketAddr, UdpSocket},
|
||||
sync::{
|
||||
atomic::{AtomicU64, AtomicUsize, Ordering},
|
||||
@@ -71,12 +70,6 @@ use {
|
||||
},
|
||||
};
|
||||
|
||||
/// (packets, valid_indexes, forwarded)
|
||||
/// Batch of packets with a list of which are valid and if this batch has been forwarded.
|
||||
type PacketBatchAndOffsets = (PacketBatch, Vec<usize>, bool);
|
||||
|
||||
pub type UnprocessedPacketBatches = VecDeque<PacketBatchAndOffsets>;
|
||||
|
||||
/// Transaction forwarding
|
||||
pub const FORWARD_TRANSACTIONS_TO_LEADER_AT_SLOT_OFFSET: u64 = 2;
|
||||
pub const HOLD_TRANSACTIONS_SLOT_OFFSET: u64 = 20;
|
||||
@@ -90,6 +83,7 @@ const MAX_NUM_TRANSACTIONS_PER_BATCH: usize = 128;
|
||||
|
||||
const NUM_VOTE_PROCESSING_THREADS: u32 = 2;
|
||||
const MIN_THREADS_BANKING: u32 = 1;
|
||||
const MIN_TOTAL_THREADS: u32 = NUM_VOTE_PROCESSING_THREADS + MIN_THREADS_BANKING;
|
||||
|
||||
pub struct ProcessTransactionBatchOutput {
|
||||
// The number of transactions filtered out by the cost model
|
||||
@@ -331,6 +325,12 @@ impl BankingStageStats {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct BatchedTransactionDetails {
|
||||
pub costs: BatchedTransactionCostDetails,
|
||||
pub errors: BatchedTransactionErrorDetails,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct BatchedTransactionCostDetails {
|
||||
pub batched_signature_cost: u64,
|
||||
@@ -339,6 +339,15 @@ pub struct BatchedTransactionCostDetails {
|
||||
pub batched_execute_cost: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct BatchedTransactionErrorDetails {
|
||||
pub batched_retried_txs_per_block_limit_count: u64,
|
||||
pub batched_retried_txs_per_vote_limit_count: u64,
|
||||
pub batched_retried_txs_per_account_limit_count: u64,
|
||||
pub batched_retried_txs_per_account_data_block_limit_count: u64,
|
||||
pub batched_dropped_txs_per_account_data_total_limit_count: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct EndOfSlot {
|
||||
next_slot_leader: Option<Pubkey>,
|
||||
@@ -403,13 +412,14 @@ impl BankingStage {
|
||||
gossip_vote_sender: ReplayVoteSender,
|
||||
cost_model: Arc<RwLock<CostModel>>,
|
||||
) -> Self {
|
||||
let batch_limit = TOTAL_BUFFERED_PACKETS / ((num_threads - 1) as usize * PACKETS_PER_BATCH);
|
||||
assert!(num_threads >= MIN_TOTAL_THREADS);
|
||||
// Single thread to generate entries from many banks.
|
||||
// This thread talks to poh_service and broadcasts the entries once they have been recorded.
|
||||
// Once an entry has been recorded, its blockhash is registered with the bank.
|
||||
let data_budget = Arc::new(DataBudget::default());
|
||||
let batch_limit = TOTAL_BUFFERED_PACKETS
|
||||
/ ((num_threads - NUM_VOTE_PROCESSING_THREADS) as usize * PACKETS_PER_BATCH);
|
||||
// Many banks that process transactions in parallel.
|
||||
assert!(num_threads >= NUM_VOTE_PROCESSING_THREADS + MIN_THREADS_BANKING);
|
||||
let bank_thread_hdls: Vec<JoinHandle<()>> = (0..num_threads)
|
||||
.map(|i| {
|
||||
let (verified_receiver, forward_option) = match i {
|
||||
@@ -433,7 +443,7 @@ impl BankingStage {
|
||||
let data_budget = data_budget.clone();
|
||||
let cost_model = cost_model.clone();
|
||||
Builder::new()
|
||||
.name("solana-banking-stage-tx".to_string())
|
||||
.name(format!("solana-banking-stage-tx-{}", i))
|
||||
.spawn(move || {
|
||||
Self::process_loop(
|
||||
&verified_receiver,
|
||||
@@ -456,12 +466,15 @@ impl BankingStage {
|
||||
}
|
||||
|
||||
fn filter_valid_packets_for_forwarding<'a>(
|
||||
packet_batches: impl Iterator<Item = &'a PacketBatchAndOffsets>,
|
||||
packet_batches: impl Iterator<Item = &'a DeserializedPacketBatch>,
|
||||
) -> Vec<&'a Packet> {
|
||||
packet_batches
|
||||
.filter(|(_batch, _indexes, forwarded)| !forwarded)
|
||||
.flat_map(|(batch, valid_indexes, _forwarded)| {
|
||||
valid_indexes.iter().map(move |x| &batch.packets[*x])
|
||||
.filter(|deserialized_packet_batch| !deserialized_packet_batch.forwarded)
|
||||
.flat_map(|deserialized_packet_batch| {
|
||||
deserialized_packet_batch
|
||||
.unprocessed_packets
|
||||
.iter()
|
||||
.map(|(index, _)| &deserialized_packet_batch.packet_batch.packets[*index])
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
@@ -507,20 +520,6 @@ impl BankingStage {
|
||||
(Ok(()), packet_vec.len())
|
||||
}
|
||||
|
||||
// Returns whether the given `PacketBatch` has any more remaining unprocessed
|
||||
// transactions
|
||||
fn update_buffered_packets_with_new_unprocessed(
|
||||
original_unprocessed_indexes: &mut Vec<usize>,
|
||||
new_unprocessed_indexes: Vec<usize>,
|
||||
) -> bool {
|
||||
let has_more_unprocessed_transactions =
|
||||
Self::packet_has_more_unprocessed_transactions(&new_unprocessed_indexes);
|
||||
if has_more_unprocessed_transactions {
|
||||
*original_unprocessed_indexes = new_unprocessed_indexes
|
||||
};
|
||||
has_more_unprocessed_transactions
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn consume_buffered_packets(
|
||||
my_pubkey: &Pubkey,
|
||||
@@ -541,154 +540,101 @@ impl BankingStage {
|
||||
let mut proc_start = Measure::start("consume_buffered_process");
|
||||
let mut reached_end_of_slot: Option<EndOfSlot> = None;
|
||||
|
||||
RetainMut::retain_mut(
|
||||
buffered_packet_batches,
|
||||
|buffered_packet_batch_and_offsets| {
|
||||
let (packet_batch, ref mut original_unprocessed_indexes, _forwarded) =
|
||||
buffered_packet_batch_and_offsets;
|
||||
if let Some(end_of_slot) = &reached_end_of_slot {
|
||||
let (should_retain, end_of_slot_filtering_time) = Measure::this(
|
||||
|_| {
|
||||
// We've hit the end of this slot, no need to perform more processing,
|
||||
// just filter the remaining packets for the invalid (e.g. too old) ones
|
||||
// if the working_bank is available
|
||||
if let Some(bank) = &end_of_slot.working_bank {
|
||||
let new_unprocessed_indexes =
|
||||
Self::filter_unprocessed_packets_at_end_of_slot(
|
||||
bank,
|
||||
packet_batch,
|
||||
original_unprocessed_indexes,
|
||||
my_pubkey,
|
||||
end_of_slot.next_slot_leader,
|
||||
banking_stage_stats,
|
||||
);
|
||||
|
||||
let end_of_slot_filtered_invalid_count =
|
||||
original_unprocessed_indexes
|
||||
.len()
|
||||
.saturating_sub(new_unprocessed_indexes.len());
|
||||
|
||||
slot_metrics_tracker.increment_end_of_slot_filtered_invalid_count(
|
||||
end_of_slot_filtered_invalid_count as u64,
|
||||
);
|
||||
|
||||
banking_stage_stats
|
||||
.end_of_slot_filtered_invalid_count
|
||||
.fetch_add(
|
||||
end_of_slot_filtered_invalid_count,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
|
||||
Self::update_buffered_packets_with_new_unprocessed(
|
||||
original_unprocessed_indexes,
|
||||
new_unprocessed_indexes,
|
||||
)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
},
|
||||
(),
|
||||
"end_of_slot_filtering",
|
||||
RetainMut::retain_mut(buffered_packet_batches, |deserialized_packet_batch| {
|
||||
let packet_batch = &deserialized_packet_batch.packet_batch;
|
||||
let original_unprocessed_indexes = deserialized_packet_batch
|
||||
.unprocessed_packets
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect::<Vec<usize>>();
|
||||
if let Some(end_of_slot) = &reached_end_of_slot {
|
||||
// We've hit the end of this slot, no need to perform more processing,
|
||||
// just filter the remaining packets for the invalid (e.g. too old) ones
|
||||
// if the working_bank is available
|
||||
let mut end_of_slot_filtering_time = Measure::start("end_of_slot_filtering");
|
||||
let should_retain = if let Some(bank) = &end_of_slot.working_bank {
|
||||
let new_unprocessed_indexes = Self::filter_unprocessed_packets_at_end_of_slot(
|
||||
bank,
|
||||
packet_batch,
|
||||
&original_unprocessed_indexes,
|
||||
my_pubkey,
|
||||
end_of_slot.next_slot_leader,
|
||||
banking_stage_stats,
|
||||
);
|
||||
slot_metrics_tracker
|
||||
.increment_end_of_slot_filtering_us(end_of_slot_filtering_time.as_us());
|
||||
should_retain
|
||||
|
||||
let end_of_slot_filtered_invalid_count = original_unprocessed_indexes
|
||||
.len()
|
||||
.saturating_sub(new_unprocessed_indexes.len());
|
||||
|
||||
slot_metrics_tracker.increment_end_of_slot_filtered_invalid_count(
|
||||
end_of_slot_filtered_invalid_count as u64,
|
||||
);
|
||||
|
||||
banking_stage_stats
|
||||
.end_of_slot_filtered_invalid_count
|
||||
.fetch_add(end_of_slot_filtered_invalid_count, Ordering::Relaxed);
|
||||
|
||||
deserialized_packet_batch.update_buffered_packets_with_new_unprocessed(
|
||||
&original_unprocessed_indexes,
|
||||
&new_unprocessed_indexes,
|
||||
)
|
||||
} else {
|
||||
let (bank_start, poh_recorder_lock_time) = Measure::this(
|
||||
|_| poh_recorder.lock().unwrap().bank_start(),
|
||||
(),
|
||||
"poh_recorder_lock",
|
||||
);
|
||||
slot_metrics_tracker.increment_consume_buffered_packets_poh_recorder_lock_us(
|
||||
poh_recorder_lock_time.as_us(),
|
||||
);
|
||||
true
|
||||
};
|
||||
end_of_slot_filtering_time.stop();
|
||||
slot_metrics_tracker
|
||||
.increment_end_of_slot_filtering_us(end_of_slot_filtering_time.as_us());
|
||||
should_retain
|
||||
} else {
|
||||
let (bank_start, poh_recorder_lock_time) = Measure::this(
|
||||
|_| poh_recorder.lock().unwrap().bank_start(),
|
||||
(),
|
||||
"poh_recorder_lock",
|
||||
);
|
||||
slot_metrics_tracker.increment_consume_buffered_packets_poh_recorder_lock_us(
|
||||
poh_recorder_lock_time.as_us(),
|
||||
);
|
||||
|
||||
if let Some(BankStart {
|
||||
working_bank,
|
||||
bank_creation_time,
|
||||
}) = bank_start
|
||||
{
|
||||
let (process_transactions_summary, process_packets_transactions_time) =
|
||||
Measure::this(
|
||||
|_| {
|
||||
Self::process_packets_transactions(
|
||||
&working_bank,
|
||||
&bank_creation_time,
|
||||
recorder,
|
||||
packet_batch,
|
||||
original_unprocessed_indexes.to_owned(),
|
||||
transaction_status_sender.clone(),
|
||||
gossip_vote_sender,
|
||||
banking_stage_stats,
|
||||
qos_service,
|
||||
slot_metrics_tracker,
|
||||
)
|
||||
},
|
||||
(),
|
||||
"process_packets_transactions",
|
||||
);
|
||||
slot_metrics_tracker.increment_process_packets_transactions_us(
|
||||
process_packets_transactions_time.as_us(),
|
||||
if let Some(BankStart {
|
||||
working_bank,
|
||||
bank_creation_time,
|
||||
}) = bank_start
|
||||
{
|
||||
let (process_transactions_summary, process_packets_transactions_time) =
|
||||
Measure::this(
|
||||
|_| {
|
||||
Self::process_packets_transactions(
|
||||
&working_bank,
|
||||
&bank_creation_time,
|
||||
recorder,
|
||||
packet_batch,
|
||||
original_unprocessed_indexes.to_owned(),
|
||||
transaction_status_sender.clone(),
|
||||
gossip_vote_sender,
|
||||
banking_stage_stats,
|
||||
qos_service,
|
||||
slot_metrics_tracker,
|
||||
)
|
||||
},
|
||||
(),
|
||||
"process_packets_transactions",
|
||||
);
|
||||
slot_metrics_tracker.increment_process_packets_transactions_us(
|
||||
process_packets_transactions_time.as_us(),
|
||||
);
|
||||
|
||||
let ProcessTransactionsSummary {
|
||||
reached_max_poh_height,
|
||||
retryable_transaction_indexes,
|
||||
..
|
||||
} = process_transactions_summary;
|
||||
let ProcessTransactionsSummary {
|
||||
reached_max_poh_height,
|
||||
retryable_transaction_indexes,
|
||||
..
|
||||
} = process_transactions_summary;
|
||||
|
||||
if reached_max_poh_height
|
||||
|| !Bank::should_bank_still_be_processing_txs(
|
||||
&bank_creation_time,
|
||||
max_tx_ingestion_ns,
|
||||
)
|
||||
{
|
||||
let poh_recorder_lock_time = {
|
||||
let (poh_recorder_locked, poh_recorder_lock_time) = Measure::this(
|
||||
|_| poh_recorder.lock().unwrap(),
|
||||
(),
|
||||
"poh_recorder_lock",
|
||||
);
|
||||
|
||||
reached_end_of_slot = Some(EndOfSlot {
|
||||
next_slot_leader: poh_recorder_locked.next_slot_leader(),
|
||||
working_bank: Some(working_bank),
|
||||
});
|
||||
poh_recorder_lock_time
|
||||
};
|
||||
|
||||
slot_metrics_tracker
|
||||
.increment_consume_buffered_packets_poh_recorder_lock_us(
|
||||
poh_recorder_lock_time.as_us(),
|
||||
);
|
||||
}
|
||||
|
||||
// The difference between all transactions passed to execution and the ones that
|
||||
// are retryable were the ones that were either:
|
||||
// 1) Committed into the block
|
||||
// 2) Dropped without being committed because they had some fatal error (too old,
|
||||
// duplicate signature, etc.)
|
||||
//
|
||||
// Note: This assumes that every packet deserializes into one transaction!
|
||||
consumed_buffered_packets_count += original_unprocessed_indexes
|
||||
.len()
|
||||
.saturating_sub(retryable_transaction_indexes.len());
|
||||
|
||||
// Out of the buffered packets just retried, collect any still unprocessed
|
||||
// transactions in this batch for forwarding
|
||||
rebuffered_packet_count += retryable_transaction_indexes.len();
|
||||
let has_more_unprocessed_transactions =
|
||||
Self::update_buffered_packets_with_new_unprocessed(
|
||||
original_unprocessed_indexes,
|
||||
retryable_transaction_indexes,
|
||||
);
|
||||
if let Some(test_fn) = &test_fn {
|
||||
test_fn();
|
||||
}
|
||||
has_more_unprocessed_transactions
|
||||
} else {
|
||||
// mark as end-of-slot to avoid aggressively locking poh for the remaining
// packet batches in the buffer
|
||||
if reached_max_poh_height
|
||||
|| !Bank::should_bank_still_be_processing_txs(
|
||||
&bank_creation_time,
|
||||
max_tx_ingestion_ns,
|
||||
)
|
||||
{
|
||||
let poh_recorder_lock_time = {
|
||||
let (poh_recorder_locked, poh_recorder_lock_time) = Measure::this(
|
||||
|_| poh_recorder.lock().unwrap(),
|
||||
@@ -698,25 +644,69 @@ impl BankingStage {
|
||||
|
||||
reached_end_of_slot = Some(EndOfSlot {
|
||||
next_slot_leader: poh_recorder_locked.next_slot_leader(),
|
||||
working_bank: None,
|
||||
working_bank: Some(working_bank),
|
||||
});
|
||||
poh_recorder_lock_time
|
||||
};
|
||||
|
||||
slot_metrics_tracker
|
||||
.increment_consume_buffered_packets_poh_recorder_lock_us(
|
||||
poh_recorder_lock_time.as_us(),
|
||||
);
|
||||
|
||||
// `original_unprocessed_indexes` must have remaining packets to process
|
||||
// if not yet processed.
|
||||
assert!(Self::packet_has_more_unprocessed_transactions(
|
||||
original_unprocessed_indexes
|
||||
));
|
||||
true
|
||||
}
|
||||
|
||||
// The difference between all transactions passed to execution and the ones that
|
||||
// are retryable were the ones that were either:
|
||||
// 1) Committed into the block
|
||||
// 2) Dropped without being committed because they had some fatal error (too old,
|
||||
// duplicate signature, etc.)
|
||||
//
|
||||
// Note: This assumes that every packet deserializes into one transaction!
|
||||
consumed_buffered_packets_count += original_unprocessed_indexes
|
||||
.len()
|
||||
.saturating_sub(retryable_transaction_indexes.len());
|
||||
|
||||
// Out of the buffered packets just retried, collect any still unprocessed
|
||||
// transactions in this batch for forwarding
|
||||
rebuffered_packet_count += retryable_transaction_indexes.len();
|
||||
let has_more_unprocessed_transactions = deserialized_packet_batch
|
||||
.update_buffered_packets_with_new_unprocessed(
|
||||
&original_unprocessed_indexes,
|
||||
&retryable_transaction_indexes,
|
||||
);
|
||||
if let Some(test_fn) = &test_fn {
|
||||
test_fn();
|
||||
}
|
||||
has_more_unprocessed_transactions
|
||||
} else {
|
||||
// mark as end-of-slot to avoid aggressively locking poh for the remaining
// packet batches in the buffer
|
||||
let poh_recorder_lock_time = {
|
||||
let (poh_recorder_locked, poh_recorder_lock_time) = Measure::this(
|
||||
|_| poh_recorder.lock().unwrap(),
|
||||
(),
|
||||
"poh_recorder_lock",
|
||||
);
|
||||
|
||||
reached_end_of_slot = Some(EndOfSlot {
|
||||
next_slot_leader: poh_recorder_locked.next_slot_leader(),
|
||||
working_bank: None,
|
||||
});
|
||||
poh_recorder_lock_time
|
||||
};
|
||||
slot_metrics_tracker.increment_consume_buffered_packets_poh_recorder_lock_us(
|
||||
poh_recorder_lock_time.as_us(),
|
||||
);
|
||||
|
||||
// `original_unprocessed_indexes` must have remaining packets to process
|
||||
// if not yet processed.
|
||||
assert!(Self::packet_has_more_unprocessed_transactions(
|
||||
&original_unprocessed_indexes
|
||||
));
|
||||
true
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
proc_start.stop();
|
||||
|
||||
@@ -810,7 +800,7 @@ impl BankingStage {
|
||||
),
|
||||
)
|
||||
};
|
||||
|
||||
slot_metrics_tracker.update_on_leader_slot_boundary(&bank_start);
|
||||
Self::consume_or_forward_packets(
|
||||
my_pubkey,
|
||||
leader_at_slot_offset,
|
||||
@@ -940,9 +930,11 @@ impl BankingStage {
|
||||
}
|
||||
|
||||
if hold {
|
||||
buffered_packet_batches.retain(|(_, index, _)| !index.is_empty());
|
||||
for (_, _, forwarded) in buffered_packet_batches.iter_mut() {
|
||||
*forwarded = true;
|
||||
buffered_packet_batches.retain(|deserialized_packet_batch| {
|
||||
!deserialized_packet_batch.unprocessed_packets.is_empty()
|
||||
});
|
||||
for deserialized_packet_batch in buffered_packet_batches.iter_mut() {
|
||||
deserialized_packet_batch.forwarded = true;
|
||||
}
|
||||
} else {
|
||||
slot_metrics_tracker
|
||||
@@ -1064,7 +1056,7 @@ impl BankingStage {
|
||||
env::var("SOLANA_BANKING_THREADS")
|
||||
.map(|x| x.parse().unwrap_or(NUM_THREADS))
|
||||
.unwrap_or(NUM_THREADS),
|
||||
NUM_VOTE_PROCESSING_THREADS + MIN_THREADS_BANKING,
|
||||
MIN_TOTAL_THREADS,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1439,27 +1431,74 @@ impl BankingStage {
|
||||
fn accumulate_batched_transaction_costs<'a>(
|
||||
transactions_costs: impl Iterator<Item = &'a TransactionCost>,
|
||||
transaction_results: impl Iterator<Item = &'a transaction::Result<()>>,
|
||||
) -> BatchedTransactionCostDetails {
|
||||
let mut cost_details = BatchedTransactionCostDetails::default();
|
||||
) -> BatchedTransactionDetails {
|
||||
let mut batched_transaction_details = BatchedTransactionDetails::default();
|
||||
transactions_costs
|
||||
.zip(transaction_results)
|
||||
.for_each(|(cost, result)| {
|
||||
if result.is_ok() {
|
||||
cost_details.batched_signature_cost = cost_details
|
||||
.batched_signature_cost
|
||||
.saturating_add(cost.signature_cost);
|
||||
cost_details.batched_write_lock_cost = cost_details
|
||||
.batched_write_lock_cost
|
||||
.saturating_add(cost.write_lock_cost);
|
||||
cost_details.batched_data_bytes_cost = cost_details
|
||||
.batched_data_bytes_cost
|
||||
.saturating_add(cost.data_bytes_cost);
|
||||
cost_details.batched_execute_cost = cost_details
|
||||
.batched_execute_cost
|
||||
.saturating_add(cost.execution_cost);
|
||||
.for_each(|(cost, result)| match result {
|
||||
Ok(_) => {
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details.costs.batched_signature_cost,
|
||||
cost.signature_cost
|
||||
);
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details.costs.batched_write_lock_cost,
|
||||
cost.write_lock_cost
|
||||
);
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details.costs.batched_data_bytes_cost,
|
||||
cost.data_bytes_cost
|
||||
);
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details.costs.batched_execute_cost,
|
||||
cost.execution_cost
|
||||
);
|
||||
}
|
||||
Err(transaction_error) => match transaction_error {
|
||||
TransactionError::WouldExceedMaxBlockCostLimit => {
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_block_limit_count,
|
||||
1
|
||||
);
|
||||
}
|
||||
TransactionError::WouldExceedMaxVoteCostLimit => {
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_vote_limit_count,
|
||||
1
|
||||
);
|
||||
}
|
||||
TransactionError::WouldExceedMaxAccountCostLimit => {
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_account_limit_count,
|
||||
1
|
||||
);
|
||||
}
|
||||
TransactionError::WouldExceedAccountDataBlockLimit => {
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_account_data_block_limit_count,
|
||||
1
|
||||
);
|
||||
}
|
||||
TransactionError::WouldExceedAccountDataTotalLimit => {
|
||||
saturating_add_assign!(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_dropped_txs_per_account_data_total_limit_count,
|
||||
1
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
});
|
||||
cost_details
|
||||
batched_transaction_details
|
||||
}
|
||||
|
||||
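The rewrite above folds the repetitive `x = x.saturating_add(y)` chains into `saturating_add_assign!`, which comes from `solana_sdk` (note the new `saturating_add_assign` import in the earlier use-block hunk). In spirit the macro is just a saturating in-place add; a rough equivalent, written here only for illustration since the real macro may differ in detail:

// Rough equivalent of what each saturating_add_assign!(lhs, rhs) call does.
macro_rules! saturating_add_assign_sketch {
    ($lhs:expr, $rhs:expr) => {{
        $lhs = $lhs.saturating_add($rhs)
    }};
}

fn demo() {
    let mut batched_signature_cost: u64 = u64::MAX - 1;
    saturating_add_assign_sketch!(batched_signature_cost, 5);
    assert_eq!(batched_signature_cost, u64::MAX); // saturates instead of overflowing
}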
fn accumulate_execute_units_and_time(execute_timings: &ExecuteTimings) -> (u64, u64) {
|
||||
@@ -1632,16 +1671,6 @@ impl BankingStage {
|
||||
.collect_vec()
|
||||
}
|
||||
|
||||
/// Read the transaction message from packet data
|
||||
fn packet_message(packet: &Packet) -> Option<&[u8]> {
|
||||
let (sig_len, sig_size) = decode_shortu16_len(&packet.data).ok()?;
|
||||
let msg_start = sig_len
|
||||
.checked_mul(size_of::<Signature>())
|
||||
.and_then(|v| v.checked_add(sig_size))?;
|
||||
let msg_end = packet.meta.size;
|
||||
Some(&packet.data[msg_start..msg_end])
|
||||
}
|
||||
|
||||
// This function deserializes packets into transactions, computes the blake3 hash of transaction
|
||||
// messages, and verifies secp256k1 instructions. A list of sanitized transactions are returned
|
||||
// with their packet indexes.
|
||||
@@ -1662,7 +1691,7 @@ impl BankingStage {
|
||||
}
|
||||
|
||||
let tx: VersionedTransaction = limited_deserialize(&p.data[0..p.meta.size]).ok()?;
|
||||
let message_bytes = Self::packet_message(p)?;
|
||||
let message_bytes = DeserializedPacketBatch::packet_message(p)?;
|
||||
let message_hash = Message::hash_raw_message(message_bytes);
|
||||
let tx = SanitizedTransaction::try_create(
|
||||
tx,
|
||||
@@ -1932,7 +1961,7 @@ impl BankingStage {
|
||||
batch_limit,
|
||||
banking_stage_stats,
|
||||
slot_metrics_tracker,
|
||||
);
|
||||
)
|
||||
}
|
||||
proc_start.stop();
|
||||
|
||||
@@ -1965,7 +1994,9 @@ impl BankingStage {
|
||||
banking_stage_stats.current_buffered_packets_count.swap(
|
||||
buffered_packet_batches
|
||||
.iter()
|
||||
.map(|packets| packets.1.len())
|
||||
.map(|deserialized_packet_batch| {
|
||||
deserialized_packet_batch.unprocessed_packets.len()
|
||||
})
|
||||
.sum(),
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
@@ -1988,9 +2019,9 @@ impl BankingStage {
|
||||
if unprocessed_packet_batches.len() >= batch_limit {
|
||||
*dropped_packet_batches_count += 1;
|
||||
if let Some(dropped_batch) = unprocessed_packet_batches.pop_front() {
|
||||
*dropped_packets_count += dropped_batch.1.len();
|
||||
*dropped_packets_count += dropped_batch.unprocessed_packets.len();
|
||||
slot_metrics_tracker.increment_exceeded_buffer_limit_dropped_packets_count(
|
||||
dropped_batch.1.len() as u64,
|
||||
dropped_batch.unprocessed_packets.len() as u64,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -2001,7 +2032,12 @@ impl BankingStage {
|
||||
*newly_buffered_packets_count += packet_indexes.len();
|
||||
slot_metrics_tracker
|
||||
.increment_newly_buffered_packets_count(packet_indexes.len() as u64);
|
||||
unprocessed_packet_batches.push_back((packet_batch, packet_indexes, false));
|
||||
|
||||
unprocessed_packet_batches.push_back(DeserializedPacketBatch::new(
|
||||
packet_batch,
|
||||
packet_indexes,
|
||||
false,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2956,7 +2992,14 @@ mod tests {
|
||||
let packet_batch = PacketBatch::new(
|
||||
(0..32)
|
||||
.map(|packet_id| {
|
||||
let mut p = Packet::default();
|
||||
// packets are deserialized upon receiving, failed packets will not be
|
||||
// forwarded; Therefore we need to create real packets here.
|
||||
let keypair = Keypair::new();
|
||||
let pubkey = solana_sdk::pubkey::new_rand();
|
||||
let blockhash = Hash::new_unique();
|
||||
let transaction =
|
||||
system_transaction::transfer(&keypair, &pubkey, 1, blockhash);
|
||||
let mut p = Packet::from_data(None, &transaction).unwrap();
|
||||
p.meta.port = packets_id << 8 | packet_id;
|
||||
p
|
||||
})
|
||||
@@ -2965,7 +3008,7 @@ mod tests {
|
||||
let valid_indexes = (0..32)
|
||||
.filter_map(|x| if x % 2 != 0 { Some(x as usize) } else { None })
|
||||
.collect_vec();
|
||||
(packet_batch, valid_indexes, false)
|
||||
DeserializedPacketBatch::new(packet_batch, valid_indexes, false)
|
||||
})
|
||||
.collect_vec();
|
||||
|
||||
@@ -2973,17 +3016,22 @@ mod tests {
|
||||
|
||||
assert_eq!(result.len(), 256);
|
||||
|
||||
let _ = result
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, p)| {
|
||||
let packets_id = index / 16;
|
||||
let packet_id = (index % 16) * 2 + 1;
|
||||
assert_eq!(p.meta.port, (packets_id << 8 | packet_id) as u16);
|
||||
// packets in a batch are forwarded in arbitrary order; verify the ports match after
|
||||
// sorting
|
||||
let expected_ports: Vec<_> = (0..16)
|
||||
.flat_map(|packets_id| {
|
||||
(0..16).map(move |packet_id| {
|
||||
let packet_id = packet_id * 2 + 1;
|
||||
(packets_id << 8 | packet_id) as u16
|
||||
})
|
||||
})
|
||||
.collect_vec();
|
||||
.collect();
|
||||
|
||||
packet_batches[0].2 = true;
|
||||
let mut forwarded_ports: Vec<_> = result.into_iter().map(|p| p.meta.port).collect();
|
||||
forwarded_ports.sort_unstable();
|
||||
assert_eq!(expected_ports, forwarded_ports);
|
||||
|
||||
packet_batches[0].forwarded = true;
|
||||
let result = BankingStage::filter_valid_packets_for_forwarding(packet_batches.iter());
|
||||
assert_eq!(result.len(), 240);
|
||||
}
|
||||
@@ -3128,10 +3176,6 @@ mod tests {
|
||||
..
|
||||
} = create_slow_genesis_config(lamports);
|
||||
let bank = Arc::new(Bank::new_no_wallclock_throttle_for_tests(&genesis_config));
|
||||
// set cost tracker limits to MAX so it will not filter out TXs
|
||||
bank.write_cost_tracker()
|
||||
.unwrap()
|
||||
.set_limits(std::u64::MAX, std::u64::MAX, std::u64::MAX);
|
||||
|
||||
// Transfer more than the balance of the mint keypair, should cause a
|
||||
// InstructionError::InsufficientFunds that is then committed. Needs to be
|
||||
@@ -3188,10 +3232,6 @@ mod tests {
|
||||
..
|
||||
} = create_slow_genesis_config(10_000);
|
||||
let bank = Arc::new(Bank::new_no_wallclock_throttle_for_tests(&genesis_config));
|
||||
// set cost tracker limits to MAX so it will not filter out TXs
|
||||
bank.write_cost_tracker()
|
||||
.unwrap()
|
||||
.set_limits(std::u64::MAX, std::u64::MAX, std::u64::MAX);
|
||||
|
||||
// Make all repetitive transactions that conflict on the `mint_keypair`, so only 1 should be executed
|
||||
let mut transactions = vec![
|
||||
@@ -3382,9 +3422,7 @@ mod tests {
|
||||
account_address: Pubkey,
|
||||
address_lookup_table: AddressLookupTable<'static>,
|
||||
) -> AccountSharedData {
|
||||
let mut data = Vec::new();
|
||||
address_lookup_table.serialize_for_tests(&mut data).unwrap();
|
||||
|
||||
let data = address_lookup_table.serialize_for_tests().unwrap();
|
||||
let mut account =
|
||||
AccountSharedData::new(1, data.len(), &solana_address_lookup_table_program::id());
|
||||
account.set_data(data);
|
||||
@@ -3588,13 +3626,14 @@ mod tests {
|
||||
num_conflicting_transactions
|
||||
);
|
||||
let packet_batch = packet_batches.pop().unwrap();
|
||||
let mut buffered_packet_batches: UnprocessedPacketBatches = vec![(
|
||||
packet_batch,
|
||||
(0..num_conflicting_transactions).into_iter().collect(),
|
||||
false,
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let mut buffered_packet_batches: UnprocessedPacketBatches =
|
||||
vec![DeserializedPacketBatch::new(
|
||||
packet_batch,
|
||||
(0..num_conflicting_transactions).into_iter().collect(),
|
||||
false,
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let (gossip_vote_sender, _gossip_vote_receiver) = unbounded();
|
||||
|
||||
@@ -3615,7 +3654,7 @@ mod tests {
|
||||
&mut LeaderSlotMetricsTracker::new(0),
|
||||
);
|
||||
assert_eq!(
|
||||
buffered_packet_batches[0].1.len(),
|
||||
buffered_packet_batches[0].unprocessed_packets.len(),
|
||||
num_conflicting_transactions
|
||||
);
|
||||
// When the poh recorder has a bank, should process all non conflicting buffered packets.
|
||||
@@ -3638,7 +3677,10 @@ mod tests {
|
||||
if num_expected_unprocessed == 0 {
|
||||
assert!(buffered_packet_batches.is_empty())
|
||||
} else {
|
||||
assert_eq!(buffered_packet_batches[0].1.len(), num_expected_unprocessed);
|
||||
assert_eq!(
|
||||
buffered_packet_batches[0].unprocessed_packets.len(),
|
||||
num_expected_unprocessed
|
||||
);
|
||||
}
|
||||
}
|
||||
poh_recorder
|
||||
@@ -3666,7 +3708,7 @@ mod tests {
|
||||
let mut buffered_packet_batches: UnprocessedPacketBatches = packet_batches
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|single_packets| (single_packets, vec![0], false))
|
||||
.map(|single_packets| DeserializedPacketBatch::new(single_packets, vec![0], false))
|
||||
.collect();
|
||||
|
||||
let (continue_sender, continue_receiver) = unbounded();
|
||||
@@ -3709,13 +3751,12 @@ mod tests {
|
||||
buffered_packet_batches.len(),
|
||||
packet_batches[interrupted_iteration + 1..].len()
|
||||
);
|
||||
for ((remaining_unprocessed_packet, _, _forwarded), original_packet) in
|
||||
buffered_packet_batches
|
||||
.iter()
|
||||
.zip(&packet_batches[interrupted_iteration + 1..])
|
||||
for (deserialized_packet_batch, original_packet) in buffered_packet_batches
|
||||
.iter()
|
||||
.zip(&packet_batches[interrupted_iteration + 1..])
|
||||
{
|
||||
assert_eq!(
|
||||
remaining_unprocessed_packet.packets[0],
|
||||
deserialized_packet_batch.packet_batch.packets[0],
|
||||
original_packet.packets[0]
|
||||
);
|
||||
}
|
||||
@@ -3748,7 +3789,13 @@ mod tests {
|
||||
fn test_forwarder_budget() {
|
||||
solana_logger::setup();
|
||||
// Create `PacketBatch` with 1 unprocessed packet
|
||||
let packet = Packet::from_data(None, &[0]).unwrap();
|
||||
let tx = system_transaction::transfer(
|
||||
&Keypair::new(),
|
||||
&solana_sdk::pubkey::new_rand(),
|
||||
1,
|
||||
Hash::new_unique(),
|
||||
);
|
||||
let packet = Packet::from_data(None, &tx).unwrap();
|
||||
let single_packet_batch = PacketBatch::new(vec![packet]);
|
||||
|
||||
let genesis_config_info = create_slow_genesis_config(10_000);
|
||||
@@ -3787,9 +3834,13 @@ mod tests {
|
||||
|
||||
for (name, data_budget, expected_num_forwarded) in test_cases {
|
||||
let mut unprocessed_packet_batches: UnprocessedPacketBatches =
|
||||
vec![(single_packet_batch.clone(), vec![0], false)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
vec![DeserializedPacketBatch::new(
|
||||
single_packet_batch.clone(),
|
||||
vec![0],
|
||||
false,
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
BankingStage::handle_forwarding(
|
||||
&ForwardOption::ForwardTransaction,
|
||||
&cluster_info,
|
||||
@@ -3819,22 +3870,34 @@ mod tests {
|
||||
#[test]
|
||||
fn test_handle_forwarding() {
|
||||
solana_logger::setup();
|
||||
// packets are deserialized upon receiving, failed packets will not be
// forwarded; therefore we need to create real packets here.
|
||||
let keypair = Keypair::new();
|
||||
let pubkey = solana_sdk::pubkey::new_rand();
|
||||
|
||||
const FWD_PACKET: u8 = 1;
|
||||
let fwd_block_hash = Hash::new_unique();
|
||||
let forwarded_packet = {
|
||||
let mut packet = Packet::from_data(None, &[FWD_PACKET]).unwrap();
|
||||
let transaction = system_transaction::transfer(&keypair, &pubkey, 1, fwd_block_hash);
|
||||
let mut packet = Packet::from_data(None, &transaction).unwrap();
|
||||
packet.meta.flags |= PacketFlags::FORWARDED;
|
||||
packet
|
||||
};
|
||||
|
||||
const NORMAL_PACKET: u8 = 2;
|
||||
let normal_packet = Packet::from_data(None, &[NORMAL_PACKET]).unwrap();
|
||||
let normal_block_hash = Hash::new_unique();
|
||||
let normal_packet = {
|
||||
let transaction = system_transaction::transfer(&keypair, &pubkey, 1, normal_block_hash);
|
||||
Packet::from_data(None, &transaction).unwrap()
|
||||
};
|
||||
|
||||
let packet_batch = PacketBatch::new(vec![forwarded_packet, normal_packet]);
|
||||
let mut unprocessed_packet_batches: UnprocessedPacketBatches =
|
||||
vec![(packet_batch, vec![0, 1], false)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
vec![DeserializedPacketBatch::new(
|
||||
packet_batch,
|
||||
vec![0, 1],
|
||||
false,
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let genesis_config_info = create_slow_genesis_config(10_000);
|
||||
let GenesisConfigInfo {
|
||||
@@ -3870,7 +3933,7 @@ mod tests {
|
||||
"fwd-normal",
|
||||
ForwardOption::ForwardTransaction,
|
||||
true,
|
||||
vec![NORMAL_PACKET],
|
||||
vec![normal_block_hash],
|
||||
2,
|
||||
),
|
||||
(
|
||||
@@ -3909,13 +3972,20 @@ mod tests {
|
||||
let num_received = recv_mmsg(recv_socket, &mut packets[..]).unwrap_or_default();
|
||||
assert_eq!(num_received, expected_ids.len(), "{}", name);
|
||||
for (i, expected_id) in expected_ids.iter().enumerate() {
|
||||
assert_eq!(packets[i].meta.size, 1);
|
||||
assert_eq!(packets[i].data[0], *expected_id, "{}", name);
|
||||
assert_eq!(packets[i].meta.size, 215);
|
||||
let recv_transaction: VersionedTransaction =
|
||||
limited_deserialize(&packets[i].data[0..packets[i].meta.size]).unwrap();
|
||||
assert_eq!(
|
||||
recv_transaction.message.recent_blockhash(),
|
||||
expected_id,
|
||||
"{}",
|
||||
name
|
||||
);
|
||||
}
|
||||
|
||||
let num_unprocessed_packets: usize = unprocessed_packet_batches
|
||||
.iter()
|
||||
.map(|(b, ..)| b.packets.len())
|
||||
.map(|b| b.packet_batch.packets.len())
|
||||
.sum();
|
||||
assert_eq!(
|
||||
num_unprocessed_packets, expected_num_unprocessed,
|
||||
@@ -3934,11 +4004,24 @@ mod tests {
|
||||
fn test_push_unprocessed_batch_limit() {
|
||||
solana_logger::setup();
|
||||
// Create `PacketBatch` with 2 unprocessed packets
|
||||
let new_packet_batch = PacketBatch::new(vec![Packet::default(); 2]);
|
||||
let mut unprocessed_packets: UnprocessedPacketBatches =
|
||||
vec![(new_packet_batch, vec![0, 1], false)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
let tx = system_transaction::transfer(
|
||||
&Keypair::new(),
|
||||
&solana_sdk::pubkey::new_rand(),
|
||||
1,
|
||||
Hash::new_unique(),
|
||||
);
|
||||
let packet = Packet::from_data(None, &tx).unwrap();
|
||||
let new_packet_batch = PacketBatch::new(vec![packet; 2]);
|
||||
let mut unprocessed_packets: UnprocessedPacketBatches = vec![DeserializedPacketBatch::new(
|
||||
new_packet_batch,
|
||||
vec![0, 1],
|
||||
false,
|
||||
)]
|
||||
.into_iter()
|
||||
.collect();
|
||||
assert_eq!(unprocessed_packets.len(), 1);
|
||||
assert_eq!(unprocessed_packets[0].unprocessed_packets.len(), 2);
|
||||
|
||||
// Set the limit to 2
|
||||
let batch_limit = 2;
|
||||
// Create new unprocessed packets and add to a batch
|
||||
@@ -4007,7 +4090,7 @@ mod tests {
|
||||
);
|
||||
assert_eq!(unprocessed_packets.len(), 2);
|
||||
assert_eq!(
|
||||
unprocessed_packets[1].0.packets[0],
|
||||
unprocessed_packets[1].packet_batch.packets[0],
|
||||
new_packet_batch.packets[0]
|
||||
);
|
||||
assert_eq!(dropped_packet_batches_count, 1);
|
||||
@@ -4015,19 +4098,6 @@ mod tests {
|
||||
assert_eq!(newly_buffered_packets_count, 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_packet_message() {
|
||||
let keypair = Keypair::new();
|
||||
let pubkey = solana_sdk::pubkey::new_rand();
|
||||
let blockhash = Hash::new_unique();
|
||||
let transaction = system_transaction::transfer(&keypair, &pubkey, 1, blockhash);
|
||||
let packet = Packet::from_data(None, &transaction).unwrap();
|
||||
assert_eq!(
|
||||
BankingStage::packet_message(&packet).unwrap().to_vec(),
|
||||
transaction.message_data()
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn make_test_packets(
|
||||
transactions: Vec<Transaction>,
|
||||
@@ -4190,12 +4260,24 @@ mod tests {
|
||||
let expected_write_locks = 7;
|
||||
let expected_data_bytes = 9;
|
||||
let expected_executions = 30;
|
||||
let cost_details =
|
||||
let batched_transaction_details =
|
||||
BankingStage::accumulate_batched_transaction_costs(tx_costs.iter(), tx_results.iter());
|
||||
assert_eq!(expected_signatures, cost_details.batched_signature_cost);
|
||||
assert_eq!(expected_write_locks, cost_details.batched_write_lock_cost);
|
||||
assert_eq!(expected_data_bytes, cost_details.batched_data_bytes_cost);
|
||||
assert_eq!(expected_executions, cost_details.batched_execute_cost);
|
||||
assert_eq!(
|
||||
expected_signatures,
|
||||
batched_transaction_details.costs.batched_signature_cost
|
||||
);
|
||||
assert_eq!(
|
||||
expected_write_locks,
|
||||
batched_transaction_details.costs.batched_write_lock_cost
|
||||
);
|
||||
assert_eq!(
|
||||
expected_data_bytes,
|
||||
batched_transaction_details.costs.batched_data_bytes_cost
|
||||
);
|
||||
assert_eq!(
|
||||
expected_executions,
|
||||
batched_transaction_details.costs.batched_execute_cost
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@@ -2,12 +2,14 @@ use {
|
||||
crate::{broadcast_stage::BroadcastStage, retransmit_stage::RetransmitStage},
|
||||
itertools::Itertools,
|
||||
lru::LruCache,
|
||||
rand::SeedableRng,
|
||||
rand::{seq::SliceRandom, Rng, SeedableRng},
|
||||
rand_chacha::ChaChaRng,
|
||||
solana_gossip::{
|
||||
cluster_info::{compute_retransmit_peers, ClusterInfo},
|
||||
contact_info::ContactInfo,
|
||||
crds::GossipRoute,
|
||||
crds_gossip_pull::CRDS_GOSSIP_PULL_CRDS_TIMEOUT_MS,
|
||||
crds_value::{CrdsData, CrdsValue},
|
||||
weighted_shuffle::{weighted_best, weighted_shuffle, WeightedShuffle},
|
||||
},
|
||||
solana_ledger::shred::Shred,
|
||||
@@ -16,6 +18,7 @@ use {
|
||||
clock::{Epoch, Slot},
|
||||
feature_set,
|
||||
pubkey::Pubkey,
|
||||
signature::Keypair,
|
||||
timing::timestamp,
|
||||
},
|
||||
solana_streamer::socket::SocketAddrSpace,
|
||||
@@ -23,6 +26,7 @@ use {
|
||||
any::TypeId,
|
||||
cmp::Reverse,
|
||||
collections::HashMap,
|
||||
iter::repeat_with,
|
||||
marker::PhantomData,
|
||||
net::SocketAddr,
|
||||
ops::Deref,
|
||||
@@ -39,7 +43,7 @@ enum NodeId {
|
||||
Pubkey(Pubkey),
|
||||
}
|
||||
|
||||
struct Node {
|
||||
pub struct Node {
|
||||
node: NodeId,
|
||||
stake: u64,
|
||||
}
|
||||
@@ -233,6 +237,18 @@ impl ClusterNodes<RetransmitStage> {
|
||||
if !enable_turbine_peers_shuffle_patch(shred.slot(), root_bank) {
|
||||
return self.get_retransmit_peers_compat(shred_seed, fanout, slot_leader);
|
||||
}
|
||||
self.get_retransmit_peers_deterministic(shred_seed, fanout, slot_leader)
|
||||
}
|
||||
|
||||
pub fn get_retransmit_peers_deterministic(
|
||||
&self,
|
||||
shred_seed: [u8; 32],
|
||||
fanout: usize,
|
||||
slot_leader: Pubkey,
|
||||
) -> (
|
||||
Vec<&Node>, // neighbors
|
||||
Vec<&Node>, // children
|
||||
) {
|
||||
let mut weighted_shuffle = self.weighted_shuffle.clone();
|
||||
// Exclude slot leader from list of nodes.
|
||||
if slot_leader == self.pubkey {
|
||||
@@ -256,7 +272,7 @@ impl ClusterNodes<RetransmitStage> {
|
||||
(neighbors, children)
|
||||
}
|
||||
|
||||
fn get_retransmit_peers_compat(
|
||||
pub fn get_retransmit_peers_compat(
|
||||
&self,
|
||||
shred_seed: [u8; 32],
|
||||
fanout: usize,
|
||||
@@ -297,7 +313,7 @@ impl ClusterNodes<RetransmitStage> {
|
||||
}
|
||||
}
|
||||
|
||||
fn new_cluster_nodes<T: 'static>(
|
||||
pub fn new_cluster_nodes<T: 'static>(
|
||||
cluster_info: &ClusterInfo,
|
||||
stakes: &HashMap<Pubkey, u64>,
|
||||
) -> ClusterNodes<T> {
|
||||
@@ -462,22 +478,61 @@ impl From<Pubkey> for NodeId {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn make_test_cluster<R: Rng>(
|
||||
rng: &mut R,
|
||||
num_nodes: usize,
|
||||
unstaked_ratio: Option<(u32, u32)>,
|
||||
) -> (
|
||||
Vec<ContactInfo>,
|
||||
HashMap<Pubkey, u64>, // stakes
|
||||
ClusterInfo,
|
||||
) {
|
||||
let (unstaked_numerator, unstaked_denominator) = unstaked_ratio.unwrap_or((1, 7));
|
||||
let mut nodes: Vec<_> = repeat_with(|| ContactInfo::new_rand(rng, None))
|
||||
.take(num_nodes)
|
||||
.collect();
|
||||
nodes.shuffle(rng);
|
||||
let this_node = nodes[0].clone();
|
||||
let mut stakes: HashMap<Pubkey, u64> = nodes
|
||||
.iter()
|
||||
.filter_map(|node| {
|
||||
if rng.gen_ratio(unstaked_numerator, unstaked_denominator) {
|
||||
None // No stake for some of the nodes.
|
||||
} else {
|
||||
Some((node.id, rng.gen_range(0, 20)))
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
// Add some staked nodes with no contact-info.
|
||||
stakes.extend(repeat_with(|| (Pubkey::new_unique(), rng.gen_range(0, 20))).take(100));
|
||||
let cluster_info = ClusterInfo::new(
|
||||
this_node,
|
||||
Arc::new(Keypair::new()),
|
||||
SocketAddrSpace::Unspecified,
|
||||
);
|
||||
{
|
||||
let now = timestamp();
|
||||
let mut gossip_crds = cluster_info.gossip.crds.write().unwrap();
|
||||
// First node is pushed to crds table by ClusterInfo constructor.
|
||||
for node in nodes.iter().skip(1) {
|
||||
let node = CrdsData::ContactInfo(node.clone());
|
||||
let node = CrdsValue::new_unsigned(node);
|
||||
assert_eq!(
|
||||
gossip_crds.insert(node, now, GossipRoute::LocalMessage),
|
||||
Ok(())
|
||||
);
|
||||
}
|
||||
}
|
||||
(nodes, stakes, cluster_info)
|
||||
}
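The new make_test_cluster helper takes the node count and an optional (numerator, denominator) unstaked ratio, defaulting to (1, 7), i.e. roughly one node in seven gets no stake. A short usage sketch, assuming it is called from inside this crate; the 200-node count and the (1, 3) ratio are illustrative choices, not values from the diff:

    let mut rng = rand::thread_rng();
    // Roughly 1 in 3 of the 200 nodes is left unstaked.
    let (nodes, stakes, cluster_info) = make_test_cluster(&mut rng, 200, Some((1, 3)));
    // ClusterInfo::tvu_peers excludes the node itself.
    assert_eq!(cluster_info.tvu_peers().len(), nodes.len() - 1);
    let _cluster_nodes = new_cluster_nodes::<RetransmitStage>(&cluster_info, &stakes);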
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use {
|
||||
super::*,
|
||||
rand::{seq::SliceRandom, Rng},
|
||||
solana_gossip::{
|
||||
crds::GossipRoute,
|
||||
crds_value::{CrdsData, CrdsValue},
|
||||
deprecated::{
|
||||
shuffle_peers_and_index, sorted_retransmit_peers_and_stakes,
|
||||
sorted_stakes_with_index,
|
||||
},
|
||||
solana_gossip::deprecated::{
|
||||
shuffle_peers_and_index, sorted_retransmit_peers_and_stakes, sorted_stakes_with_index,
|
||||
},
|
||||
solana_sdk::{signature::Keypair, timing::timestamp},
|
||||
solana_streamer::socket::SocketAddrSpace,
|
||||
std::{iter::repeat_with, sync::Arc},
|
||||
};
|
||||
|
||||
// Legacy methods copied for testing backward compatibility.
|
||||
@@ -499,55 +554,10 @@ mod tests {
|
||||
sorted_stakes_with_index(peers, stakes)
|
||||
}
|
||||
|
||||
fn make_cluster<R: Rng>(
|
||||
rng: &mut R,
|
||||
) -> (
|
||||
Vec<ContactInfo>,
|
||||
HashMap<Pubkey, u64>, // stakes
|
||||
ClusterInfo,
|
||||
) {
|
||||
let mut nodes: Vec<_> = repeat_with(|| ContactInfo::new_rand(rng, None))
|
||||
.take(1000)
|
||||
.collect();
|
||||
nodes.shuffle(rng);
|
||||
let this_node = nodes[0].clone();
|
||||
let mut stakes: HashMap<Pubkey, u64> = nodes
|
||||
.iter()
|
||||
.filter_map(|node| {
|
||||
if rng.gen_ratio(1, 7) {
|
||||
None // No stake for some of the nodes.
|
||||
} else {
|
||||
Some((node.id, rng.gen_range(0, 20)))
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
// Add some staked nodes with no contact-info.
|
||||
stakes.extend(repeat_with(|| (Pubkey::new_unique(), rng.gen_range(0, 20))).take(100));
|
||||
let cluster_info = ClusterInfo::new(
|
||||
this_node,
|
||||
Arc::new(Keypair::new()),
|
||||
SocketAddrSpace::Unspecified,
|
||||
);
|
||||
{
|
||||
let now = timestamp();
|
||||
let mut gossip_crds = cluster_info.gossip.crds.write().unwrap();
|
||||
// First node is pushed to crds table by ClusterInfo constructor.
|
||||
for node in nodes.iter().skip(1) {
|
||||
let node = CrdsData::ContactInfo(node.clone());
|
||||
let node = CrdsValue::new_unsigned(node);
|
||||
assert_eq!(
|
||||
gossip_crds.insert(node, now, GossipRoute::LocalMessage),
|
||||
Ok(())
|
||||
);
|
||||
}
|
||||
}
|
||||
(nodes, stakes, cluster_info)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cluster_nodes_retransmit() {
|
||||
let mut rng = rand::thread_rng();
|
||||
let (nodes, stakes, cluster_info) = make_cluster(&mut rng);
|
||||
let (nodes, stakes, cluster_info) = make_test_cluster(&mut rng, 1_000, None);
|
||||
let this_node = cluster_info.my_contact_info();
|
||||
// ClusterInfo::tvu_peers excludes the node itself.
|
||||
assert_eq!(cluster_info.tvu_peers().len(), nodes.len() - 1);
|
||||
@@ -628,7 +638,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_cluster_nodes_broadcast() {
|
||||
let mut rng = rand::thread_rng();
|
||||
let (nodes, stakes, cluster_info) = make_cluster(&mut rng);
|
||||
let (nodes, stakes, cluster_info) = make_test_cluster(&mut rng, 1_000, None);
|
||||
// ClusterInfo::tvu_peers excludes the node itself.
|
||||
assert_eq!(cluster_info.tvu_peers().len(), nodes.len() - 1);
|
||||
let cluster_nodes = ClusterNodes::<BroadcastStage>::new(&cluster_info, &stakes);
|
||||
|
@@ -11,44 +11,33 @@ use {
|
||||
solana_runtime::{bank::Bank, cost_model::CostModel},
|
||||
solana_sdk::timing::timestamp,
|
||||
std::{
|
||||
sync::{Arc, RwLock},
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc, RwLock,
|
||||
},
|
||||
thread::{self, Builder, JoinHandle},
|
||||
time::Duration,
|
||||
},
|
||||
};
|
||||
|
||||
// Update blockstore persistence storage when accumulated cost_table updates count exceeds the threshold
|
||||
const PERSIST_THRESHOLD: u64 = 1_000;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct CostUpdateServiceTiming {
|
||||
last_print: u64,
|
||||
update_cost_model_count: u64,
|
||||
update_cost_model_elapsed: u64,
|
||||
persist_cost_table_elapsed: u64,
|
||||
}
|
||||
|
||||
impl CostUpdateServiceTiming {
|
||||
fn update(
|
||||
&mut self,
|
||||
update_cost_model_count: Option<u64>,
|
||||
update_cost_model_elapsed: Option<u64>,
|
||||
persist_cost_table_elapsed: Option<u64>,
|
||||
) {
|
||||
if let Some(update_cost_model_count) = update_cost_model_count {
|
||||
self.update_cost_model_count += update_cost_model_count;
|
||||
}
|
||||
if let Some(update_cost_model_elapsed) = update_cost_model_elapsed {
|
||||
self.update_cost_model_elapsed += update_cost_model_elapsed;
|
||||
}
|
||||
if let Some(persist_cost_table_elapsed) = persist_cost_table_elapsed {
|
||||
self.persist_cost_table_elapsed += persist_cost_table_elapsed;
|
||||
}
|
||||
fn update(&mut self, update_cost_model_count: u64, update_cost_model_elapsed: u64) {
|
||||
self.update_cost_model_count += update_cost_model_count;
|
||||
self.update_cost_model_elapsed += update_cost_model_elapsed;
|
||||
|
||||
let now = timestamp();
|
||||
let elapsed_ms = now - self.last_print;
|
||||
if elapsed_ms > 1000 {
|
||||
datapoint_info!(
|
||||
"cost-update-service-stats",
|
||||
("total_elapsed_us", elapsed_ms * 1000, i64),
|
||||
(
|
||||
"update_cost_model_count",
|
||||
self.update_cost_model_count as i64,
|
||||
@@ -59,11 +48,6 @@ impl CostUpdateServiceTiming {
|
||||
self.update_cost_model_elapsed as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"persist_cost_table_elapsed",
|
||||
self.persist_cost_table_elapsed as i64,
|
||||
i64
|
||||
),
|
||||
);
|
||||
|
||||
*self = CostUpdateServiceTiming::default();
|
||||
@@ -90,6 +74,7 @@ pub struct CostUpdateService {
|
||||
impl CostUpdateService {
|
||||
#[allow(clippy::new_ret_no_self)]
|
||||
pub fn new(
|
||||
exit: Arc<AtomicBool>,
|
||||
blockstore: Arc<Blockstore>,
|
||||
cost_model: Arc<RwLock<CostModel>>,
|
||||
cost_update_receiver: CostUpdateReceiver,
|
||||
@@ -97,7 +82,7 @@ impl CostUpdateService {
|
||||
let thread_hdl = Builder::new()
|
||||
.name("solana-cost-update-service".to_string())
|
||||
.spawn(move || {
|
||||
Self::service_loop(blockstore, cost_model, cost_update_receiver);
|
||||
Self::service_loop(exit, blockstore, cost_model, cost_update_receiver);
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
@@ -109,99 +94,85 @@ impl CostUpdateService {
|
||||
}
|
||||
|
||||
fn service_loop(
|
||||
blockstore: Arc<Blockstore>,
|
||||
exit: Arc<AtomicBool>,
|
||||
_blockstore: Arc<Blockstore>,
|
||||
cost_model: Arc<RwLock<CostModel>>,
|
||||
cost_update_receiver: CostUpdateReceiver,
|
||||
) {
|
||||
let mut cost_update_service_timing = CostUpdateServiceTiming::default();
|
||||
let mut update_count = 0_u64;
|
||||
let mut update_count: u64;
|
||||
let wait_timer = Duration::from_millis(100);
|
||||
|
||||
for cost_update in cost_update_receiver.iter() {
|
||||
match cost_update {
|
||||
CostUpdate::FrozenBank { bank } => {
|
||||
bank.read_cost_tracker().unwrap().report_stats(bank.slot());
|
||||
loop {
|
||||
if exit.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
|
||||
update_count = 0_u64;
|
||||
let mut update_cost_model_time = Measure::start("update_cost_model_time");
|
||||
for cost_update in cost_update_receiver.try_iter() {
|
||||
match cost_update {
|
||||
CostUpdate::FrozenBank { bank } => {
|
||||
bank.read_cost_tracker().unwrap().report_stats(bank.slot());
|
||||
}
|
||||
CostUpdate::ExecuteTiming {
|
||||
mut execute_timings,
|
||||
} => {
|
||||
Self::update_cost_model(&cost_model, &mut execute_timings);
|
||||
update_count += 1;
|
||||
}
|
||||
}
|
||||
CostUpdate::ExecuteTiming {
|
||||
mut execute_timings,
|
||||
} => {
|
||||
let mut update_cost_model_time = Measure::start("update_cost_model_time");
|
||||
update_count += Self::update_cost_model(&cost_model, &mut execute_timings);
|
||||
update_cost_model_time.stop();
|
||||
cost_update_service_timing.update(
|
||||
Some(update_count),
|
||||
Some(update_cost_model_time.as_us()),
|
||||
None,
|
||||
);
|
||||
}
|
||||
update_cost_model_time.stop();
|
||||
|
||||
if update_count > PERSIST_THRESHOLD {
|
||||
let mut persist_cost_table_time = Measure::start("persist_cost_table_time");
|
||||
Self::persist_cost_table(&blockstore, &cost_model);
|
||||
update_count = 0_u64;
|
||||
persist_cost_table_time.stop();
|
||||
cost_update_service_timing.update(
|
||||
None,
|
||||
None,
|
||||
Some(persist_cost_table_time.as_us()),
|
||||
cost_update_service_timing.update(update_count, update_cost_model_time.as_us());
|
||||
|
||||
thread::sleep(wait_timer);
|
||||
}
|
||||
}
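The reworked service_loop above stops blocking on the receiver: each pass drains whatever is queued with try_iter(), reports timing, then sleeps for wait_timer so the exit flag is checked regularly. A self-contained sketch of that drain-then-sleep pattern using crossbeam_channel (the names and values here are illustrative, not Solana APIs):

    use crossbeam_channel::unbounded;
    use std::{thread, time::Duration};

    fn main() {
        let (sender, receiver) = unbounded::<u64>();
        sender.send(1).unwrap();
        sender.send(2).unwrap();
        let mut processed = 0u64;
        for _round in 0..3 {
            // Drain everything currently queued without blocking.
            for update in receiver.try_iter() {
                processed += update;
            }
            // Sleep between polls; a real loop would also check an exit flag here.
            thread::sleep(Duration::from_millis(10));
        }
        assert_eq!(processed, 3);
    }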
|
||||
|
||||
fn update_cost_model(
|
||||
cost_model: &RwLock<CostModel>,
|
||||
execute_timings: &mut ExecuteTimings,
|
||||
) -> bool {
|
||||
let mut dirty = false;
|
||||
{
|
||||
for (program_id, program_timings) in &mut execute_timings.details.per_program_timings {
|
||||
let current_estimated_program_cost =
|
||||
cost_model.read().unwrap().find_instruction_cost(program_id);
|
||||
program_timings.coalesce_error_timings(current_estimated_program_cost);
|
||||
|
||||
if program_timings.count < 1 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let units = program_timings.accumulated_units / program_timings.count as u64;
|
||||
match cost_model
|
||||
.write()
|
||||
.unwrap()
|
||||
.upsert_instruction_cost(program_id, units)
|
||||
{
|
||||
Ok(c) => {
|
||||
debug!(
|
||||
"after replayed into bank, instruction {:?} has averaged cost {}",
|
||||
program_id, c
|
||||
);
|
||||
dirty = true;
|
||||
}
|
||||
Err(err) => {
|
||||
debug!(
|
||||
"after replayed into bank, instruction {:?} failed to update cost, err: {}",
|
||||
program_id, err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize `program_timings` with current estimated cost, update instruction_cost table
|
||||
// Returns number of updates applied
|
||||
fn update_cost_model(
|
||||
cost_model: &RwLock<CostModel>,
|
||||
execute_timings: &mut ExecuteTimings,
|
||||
) -> u64 {
|
||||
let mut update_count = 0_u64;
|
||||
for (program_id, program_timings) in &mut execute_timings.details.per_program_timings {
|
||||
let current_estimated_program_cost =
|
||||
cost_model.read().unwrap().find_instruction_cost(program_id);
|
||||
program_timings.coalesce_error_timings(current_estimated_program_cost);
|
||||
|
||||
if program_timings.count < 1 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let units = program_timings.accumulated_units / program_timings.count as u64;
|
||||
cost_model
|
||||
.write()
|
||||
.unwrap()
|
||||
.upsert_instruction_cost(program_id, units);
|
||||
update_count += 1;
|
||||
debug!(
|
||||
"After replayed into bank, updated cost for instruction {:?}, update_value {}, pre_aggregated_value {}",
|
||||
program_id, units, current_estimated_program_cost
|
||||
);
|
||||
}
|
||||
update_count
|
||||
}
|
||||
|
||||
// 1. Remove obsolete program entries from persisted table to limit its size
|
||||
// 2. Update persisted program cost. This involves EMA cost calculation at
|
||||
// execute_cost_table.get_cost()
|
||||
fn persist_cost_table(blockstore: &Blockstore, cost_model: &RwLock<CostModel>) {
|
||||
let db_records = blockstore.read_program_costs().expect("read programs");
|
||||
let cost_model = cost_model.read().unwrap();
|
||||
let active_program_keys = cost_model.get_program_keys();
|
||||
|
||||
// delete records from blockstore if they are no longer in cost_table
|
||||
db_records.iter().for_each(|(pubkey, _)| {
|
||||
if !active_program_keys.contains(&pubkey) {
|
||||
blockstore
|
||||
.delete_program_cost(pubkey)
|
||||
.expect("delete old program");
|
||||
}
|
||||
});
|
||||
|
||||
active_program_keys.iter().for_each(|program_id| {
|
||||
let cost = cost_model.find_instruction_cost(program_id);
|
||||
blockstore
|
||||
.write_program_cost(program_id, &cost)
|
||||
.expect("persist program costs to blockstore");
|
||||
});
|
||||
debug!(
|
||||
"after replayed into bank, updated cost model instruction cost table, current values: {:?}",
|
||||
cost_model.read().unwrap().get_instruction_cost_table()
|
||||
);
|
||||
dirty
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,9 +184,15 @@ mod tests {
|
||||
fn test_update_cost_model_with_empty_execute_timings() {
|
||||
let cost_model = Arc::new(RwLock::new(CostModel::default()));
|
||||
let mut empty_execute_timings = ExecuteTimings::default();
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut empty_execute_timings);
|
||||
|
||||
assert_eq!(
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut empty_execute_timings),
|
||||
0
|
||||
0,
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.get_instruction_cost_table()
|
||||
.len()
|
||||
);
|
||||
}
|
||||
|
||||
@@ -233,7 +210,7 @@ mod tests {
|
||||
let accumulated_units: u64 = 100;
|
||||
let total_errored_units = 0;
|
||||
let count: u32 = 10;
|
||||
expected_cost = accumulated_units / count as u64; // = 10
|
||||
expected_cost = accumulated_units / count as u64;
|
||||
|
||||
execute_timings.details.per_program_timings.insert(
|
||||
program_key_1,
|
||||
@@ -245,15 +222,22 @@ mod tests {
|
||||
total_errored_units,
|
||||
},
|
||||
);
|
||||
let update_count =
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(1, update_count);
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(
|
||||
expected_cost,
|
||||
1,
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.find_instruction_cost(&program_key_1)
|
||||
.get_instruction_cost_table()
|
||||
.len()
|
||||
);
|
||||
assert_eq!(
|
||||
Some(&expected_cost),
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.get_instruction_cost_table()
|
||||
.get(&program_key_1)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -262,8 +246,8 @@ mod tests {
|
||||
let accumulated_us: u64 = 2000;
|
||||
let accumulated_units: u64 = 200;
|
||||
let count: u32 = 10;
|
||||
// to expect new cost = (mean + 2 * std) of [10, 20]
|
||||
expected_cost = 13;
|
||||
// to expect new cost is Average(new_value, existing_value)
|
||||
expected_cost = ((accumulated_units / count as u64) + expected_cost) / 2;
|
||||
|
||||
execute_timings.details.per_program_timings.insert(
|
||||
program_key_1,
|
||||
@@ -275,15 +259,22 @@ mod tests {
|
||||
total_errored_units: 0,
|
||||
},
|
||||
);
|
||||
let update_count =
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(1, update_count);
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(
|
||||
expected_cost,
|
||||
1,
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.find_instruction_cost(&program_key_1)
|
||||
.get_instruction_cost_table()
|
||||
.len()
|
||||
);
|
||||
assert_eq!(
|
||||
Some(&expected_cost),
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.get_instruction_cost_table()
|
||||
.get(&program_key_1)
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -307,49 +298,20 @@ mod tests {
|
||||
total_errored_units: 0,
|
||||
},
|
||||
);
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
// If both the `errored_txs_compute_consumed` is empty and `count == 0`, then
|
||||
// nothing should be inserted into the cost model
|
||||
assert_eq!(
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings),
|
||||
0
|
||||
);
|
||||
}
|
||||
|
||||
// set up current instruction cost to 100
|
||||
let current_program_cost = 100;
|
||||
{
|
||||
execute_timings.details.per_program_timings.insert(
|
||||
program_key_1,
|
||||
ProgramTiming {
|
||||
accumulated_us: 1000,
|
||||
accumulated_units: current_program_cost,
|
||||
count: 1,
|
||||
errored_txs_compute_consumed: vec![],
|
||||
total_errored_units: 0,
|
||||
},
|
||||
);
|
||||
let update_count =
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(1, update_count);
|
||||
assert_eq!(
|
||||
current_program_cost,
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.find_instruction_cost(&program_key_1)
|
||||
);
|
||||
assert!(cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.get_instruction_cost_table()
|
||||
.is_empty());
|
||||
}
|
||||
|
||||
// Test updating cost model with only erroring compute costs where the `cost_per_error` is
|
||||
// greater than the current instruction cost for the program. Should update with the
|
||||
// new erroring compute costs
|
||||
let cost_per_error = 1000;
|
||||
// expected_cost = (mean + 2*std) of data points:
|
||||
// [
|
||||
// 100, // original program_cost
|
||||
// 1000, // cost_per_error
|
||||
// ]
|
||||
let expected_cost = 289u64;
|
||||
{
|
||||
let errored_txs_compute_consumed = vec![cost_per_error; 3];
|
||||
let total_errored_units = errored_txs_compute_consumed.iter().sum();
|
||||
@@ -363,23 +325,29 @@ mod tests {
|
||||
total_errored_units,
|
||||
},
|
||||
);
|
||||
let update_count =
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
|
||||
assert_eq!(1, update_count);
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(
|
||||
expected_cost,
|
||||
1,
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.find_instruction_cost(&program_key_1)
|
||||
.get_instruction_cost_table()
|
||||
.len()
|
||||
);
|
||||
assert_eq!(
|
||||
Some(&cost_per_error),
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.get_instruction_cost_table()
|
||||
.get(&program_key_1)
|
||||
);
|
||||
}
|
||||
|
||||
// Test updating cost model with only erroring compute costs where the error cost is
|
||||
// `smaller_cost_per_error`, less than the current instruction cost for the program.
|
||||
// The cost should not decrease for these new lesser errors
|
||||
let smaller_cost_per_error = expected_cost - 10;
|
||||
let smaller_cost_per_error = cost_per_error - 10;
|
||||
{
|
||||
let errored_txs_compute_consumed = vec![smaller_cost_per_error; 3];
|
||||
let total_errored_units = errored_txs_compute_consumed.iter().sum();
|
||||
@@ -393,23 +361,22 @@ mod tests {
|
||||
total_errored_units,
|
||||
},
|
||||
);
|
||||
let update_count =
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
|
||||
// expected_cost = (mean = 2*std) of data points:
|
||||
// [
|
||||
// 100, // original program cost,
|
||||
// 1000, // cost_per_error from above test
|
||||
// 289, // the smaller_cost_per_error will be coalesced to prev cost
|
||||
// ]
|
||||
let expected_cost = 293u64;
|
||||
assert_eq!(1, update_count);
|
||||
CostUpdateService::update_cost_model(&cost_model, &mut execute_timings);
|
||||
assert_eq!(
|
||||
expected_cost,
|
||||
1,
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.find_instruction_cost(&program_key_1)
|
||||
.get_instruction_cost_table()
|
||||
.len()
|
||||
);
|
||||
assert_eq!(
|
||||
Some(&cost_per_error),
|
||||
cost_model
|
||||
.read()
|
||||
.unwrap()
|
||||
.get_instruction_cost_table()
|
||||
.get(&program_key_1)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
44
core/src/ledger_metric_report_service.rs
Normal file
@@ -0,0 +1,44 @@
//! The `ledger_metric_report_service` periodically reports ledger store metrics.

use {
    solana_ledger::blockstore::Blockstore,
    std::{
        string::ToString,
        sync::{
            atomic::{AtomicBool, Ordering},
            Arc,
        },
        thread::{self, Builder, JoinHandle},
        time::Duration,
    },
};

// Determines how often we report blockstore metrics.
const BLOCKSTORE_METRICS_REPORT_PERIOD_MILLIS: u64 = 10000;

pub struct LedgerMetricReportService {
    t_cf_metric: JoinHandle<()>,
}

impl LedgerMetricReportService {
    pub fn new(blockstore: Arc<Blockstore>, exit: &Arc<AtomicBool>) -> Self {
        let exit_signal = exit.clone();
        let t_cf_metric = Builder::new()
            .name("metric_report_rocksdb_cf_metrics".to_string())
            .spawn(move || loop {
                if exit_signal.load(Ordering::Relaxed) {
                    break;
                }
                thread::sleep(Duration::from_millis(
                    BLOCKSTORE_METRICS_REPORT_PERIOD_MILLIS,
                ));
                blockstore.submit_rocksdb_cf_metrics_for_all_cfs();
            })
            .unwrap();
        Self { t_cf_metric }
    }

    pub fn join(self) -> thread::Result<()> {
        self.t_cf_metric.join()
    }
}
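A sketch of how the new service might be wired up by its caller; the integration point (validator startup) is not shown in this diff, and blockstore here is assumed to be an Arc<Blockstore> already in scope:

    let exit = Arc::new(AtomicBool::new(false));
    let ledger_metric_report_service =
        LedgerMetricReportService::new(blockstore.clone(), &exit);
    // ... run ...
    exit.store(true, Ordering::Relaxed);
    ledger_metric_report_service.join().unwrap();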
@@ -31,9 +31,11 @@ pub mod latest_validator_votes_for_frozen_banks;
pub mod leader_slot_banking_stage_metrics;
pub mod leader_slot_banking_stage_timing_metrics;
pub mod ledger_cleanup_service;
pub mod ledger_metric_report_service;
pub mod optimistic_confirmation_verifier;
pub mod outstanding_requests;
pub mod packet_hasher;
pub mod packet_threshold;
pub mod progress_map;
pub mod qos_service;
pub mod repair_generic_traversal;
@@ -62,6 +64,7 @@ pub mod tpu;
pub mod tree_diff;
pub mod tvu;
pub mod unfrozen_gossip_verified_vote_hashes;
pub mod unprocessed_packet_batches;
pub mod validator;
pub mod verified_vote_packets;
pub mod vote_simulator;
84
core/src/packet_threshold.rs
Normal file
@@ -0,0 +1,84 @@
use std::time::Duration;

enum PacketThresholdUpdate {
    Increase,
    Decrease,
}

impl PacketThresholdUpdate {
    const PERCENTAGE: usize = 90;

    fn calculate(&self, current: usize) -> usize {
        match *self {
            PacketThresholdUpdate::Increase => {
                current.saturating_mul(100).saturating_div(Self::PERCENTAGE)
            }
            PacketThresholdUpdate::Decrease => {
                current.saturating_mul(Self::PERCENTAGE).saturating_div(100)
            }
        }
    }
}

#[derive(Debug)]
pub struct DynamicPacketToProcessThreshold {
    max_packets: usize,
}

impl Default for DynamicPacketToProcessThreshold {
    fn default() -> Self {
        Self {
            max_packets: Self::DEFAULT_MAX_PACKETS,
        }
    }
}

impl DynamicPacketToProcessThreshold {
    const DEFAULT_MAX_PACKETS: usize = 1024;
    const TIME_THRESHOLD: Duration = Duration::from_secs(1);

    pub fn update(&mut self, total_packets: usize, compute_time: Duration) {
        if total_packets >= self.max_packets {
            let threshold_update = if compute_time > Self::TIME_THRESHOLD {
                PacketThresholdUpdate::Decrease
            } else {
                PacketThresholdUpdate::Increase
            };
            self.max_packets = threshold_update.calculate(self.max_packets);
        }
    }

    pub fn should_drop(&self, total: usize) -> bool {
        total >= self.max_packets
    }
}

#[cfg(test)]
mod test {
    use {super::DynamicPacketToProcessThreshold, std::time::Duration};

    #[test]
    fn test_dynamic_packet_threshold() {
        let mut threshold = DynamicPacketToProcessThreshold::default();
        assert_eq!(
            threshold.max_packets,
            DynamicPacketToProcessThreshold::DEFAULT_MAX_PACKETS
        );

        assert!(!threshold.should_drop(10));
        assert!(threshold.should_drop(2000));

        let old = threshold.max_packets;

        // Increase
        let total = 2000;
        let compute_time = Duration::from_millis(500);
        threshold.update(total, compute_time);
        assert!(threshold.max_packets > old);

        // Decrease
        let compute_time = Duration::from_millis(2000);
        threshold.update(total, compute_time);
        assert_eq!(threshold.max_packets, old - 1); // due to rounding error, there is a difference of 1
    }
}
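Worked example of the integer arithmetic behind the `old - 1` assertion in the test above; both steps use the same 90% factor, but truncation makes the round trip land one packet low:

    let start: usize = 1024;
    let increased = start * 100 / 90; // 1137 (1137.7 truncated)
    let decreased = increased * 90 / 100; // 1023 (1023.3 truncated)
    assert_eq!(decreased, start - 1);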
@@ -339,7 +339,7 @@ pub struct RetransmitInfo {
impl RetransmitInfo {
    pub fn reached_retransmit_threshold(&self) -> bool {
        let backoff = std::cmp::min(self.retry_iteration, RETRANSMIT_BACKOFF_CAP);
        let backoff_duration_ms = 2_u64.pow(backoff) * RETRANSMIT_BASE_DELAY_MS;
        let backoff_duration_ms = (1_u64 << backoff) * RETRANSMIT_BASE_DELAY_MS;
        self.retry_time
            .map(|time| time.elapsed().as_millis() > backoff_duration_ms.into())
            .unwrap_or(true)
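The change above swaps 2_u64.pow(backoff) for an equivalent bit shift; for any capped backoff below 64 the two forms agree, so the retry delay schedule is unchanged:

    // For backoff = 5 both forms give 32; with a base delay of, say, 200 ms
    // (an illustrative value, not necessarily RETRANSMIT_BASE_DELAY_MS),
    // the threshold would be 32 * 200 = 6_400 ms.
    assert_eq!(2_u64.pow(5), 1_u64 << 5);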
@@ -3,7 +3,7 @@
|
||||
//! how transactions are included in blocks, and optimize those blocks.
|
||||
//!
|
||||
use {
|
||||
crate::banking_stage::BatchedTransactionCostDetails,
|
||||
crate::banking_stage::BatchedTransactionDetails,
|
||||
crossbeam_channel::{unbounded, Receiver, Sender},
|
||||
solana_measure::measure::Measure,
|
||||
solana_runtime::{
|
||||
@@ -68,8 +68,8 @@ impl QosService {
|
||||
let running_flag = Arc::new(AtomicBool::new(true));
|
||||
let metrics = Arc::new(QosServiceMetrics::new(id));
|
||||
|
||||
let running_flag_clone = running_flag.clone();
|
||||
let metrics_clone = metrics.clone();
|
||||
let running_flag_clone = Arc::clone(&running_flag);
|
||||
let metrics_clone = Arc::clone(&metrics);
|
||||
let reporting_thread = Some(
|
||||
Builder::new()
|
||||
.name("solana-qos-service-metrics-repoting".to_string())
|
||||
@@ -109,9 +109,11 @@ impl QosService {
|
||||
.collect();
|
||||
compute_cost_time.stop();
|
||||
self.metrics
|
||||
.stats
|
||||
.compute_cost_time
|
||||
.fetch_add(compute_cost_time.as_us(), Ordering::Relaxed);
|
||||
self.metrics
|
||||
.stats
|
||||
.compute_cost_count
|
||||
.fetch_add(txs_costs.len() as u64, Ordering::Relaxed);
|
||||
txs_costs
|
||||
@@ -134,7 +136,7 @@ impl QosService {
|
||||
.map(|(tx, cost)| match cost_tracker.try_add(tx, cost) {
|
||||
Ok(current_block_cost) => {
|
||||
debug!("slot {:?}, transaction {:?}, cost {:?}, fit into current block, current block cost {}", bank.slot(), tx, cost, current_block_cost);
|
||||
self.metrics.selected_txs_count.fetch_add(1, Ordering::Relaxed);
|
||||
self.metrics.stats.selected_txs_count.fetch_add(1, Ordering::Relaxed);
|
||||
num_included += 1;
|
||||
Ok(())
|
||||
},
|
||||
@@ -142,20 +144,19 @@ impl QosService {
|
||||
debug!("slot {:?}, transaction {:?}, cost {:?}, not fit into current block, '{:?}'", bank.slot(), tx, cost, e);
|
||||
match e {
|
||||
CostTrackerError::WouldExceedBlockMaxLimit => {
|
||||
self.metrics.retried_txs_per_block_limit_count.fetch_add(1, Ordering::Relaxed);
|
||||
Err(TransactionError::WouldExceedMaxBlockCostLimit)
|
||||
}
|
||||
CostTrackerError::WouldExceedVoteMaxLimit => {
|
||||
self.metrics.retried_txs_per_vote_limit_count.fetch_add(1, Ordering::Relaxed);
|
||||
Err(TransactionError::WouldExceedMaxVoteCostLimit)
|
||||
}
|
||||
CostTrackerError::WouldExceedAccountMaxLimit => {
|
||||
self.metrics.retried_txs_per_account_limit_count.fetch_add(1, Ordering::Relaxed);
|
||||
Err(TransactionError::WouldExceedMaxAccountCostLimit)
|
||||
}
|
||||
CostTrackerError::WouldExceedAccountDataMaxLimit => {
|
||||
self.metrics.retried_txs_per_account_data_limit_count.fetch_add(1, Ordering::Relaxed);
|
||||
Err(TransactionError::WouldExceedMaxAccountDataCostLimit)
|
||||
CostTrackerError::WouldExceedAccountDataBlockLimit => {
|
||||
Err(TransactionError::WouldExceedAccountDataBlockLimit)
|
||||
}
|
||||
CostTrackerError::WouldExceedAccountDataTotalLimit => {
|
||||
Err(TransactionError::WouldExceedAccountDataTotalLimit)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -163,6 +164,7 @@ impl QosService {
|
||||
.collect();
|
||||
cost_tracking_time.stop();
|
||||
self.metrics
|
||||
.stats
|
||||
.cost_tracking_time
|
||||
.fetch_add(cost_tracking_time.as_us(), Ordering::Relaxed);
|
||||
(select_results, num_included)
|
||||
@@ -177,30 +179,82 @@ impl QosService {
|
||||
|
||||
pub fn accumulate_estimated_transaction_costs(
|
||||
&self,
|
||||
cost_details: &BatchedTransactionCostDetails,
|
||||
batched_transaction_details: &BatchedTransactionDetails,
|
||||
) {
|
||||
self.metrics.stats.estimated_signature_cu.fetch_add(
|
||||
batched_transaction_details.costs.batched_signature_cost,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics.stats.estimated_write_lock_cu.fetch_add(
|
||||
batched_transaction_details.costs.batched_write_lock_cost,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics.stats.estimated_data_bytes_cu.fetch_add(
|
||||
batched_transaction_details.costs.batched_data_bytes_cost,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics.stats.estimated_execute_cu.fetch_add(
|
||||
batched_transaction_details.costs.batched_execute_cost,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
|
||||
self.metrics
|
||||
.estimated_signature_cu
|
||||
.fetch_add(cost_details.batched_signature_cost, Ordering::Relaxed);
|
||||
.errors
|
||||
.retried_txs_per_block_limit_count
|
||||
.fetch_add(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_block_limit_count,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics
|
||||
.estimated_write_lock_cu
|
||||
.fetch_add(cost_details.batched_write_lock_cost, Ordering::Relaxed);
|
||||
.errors
|
||||
.retried_txs_per_vote_limit_count
|
||||
.fetch_add(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_vote_limit_count,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics
|
||||
.estimated_data_bytes_cu
|
||||
.fetch_add(cost_details.batched_data_bytes_cost, Ordering::Relaxed);
|
||||
.errors
|
||||
.retried_txs_per_account_limit_count
|
||||
.fetch_add(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_account_limit_count,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics
|
||||
.estimated_execute_cu
|
||||
.fetch_add(cost_details.batched_execute_cost, Ordering::Relaxed);
|
||||
.errors
|
||||
.retried_txs_per_account_data_block_limit_count
|
||||
.fetch_add(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_retried_txs_per_account_data_block_limit_count,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
self.metrics
|
||||
.errors
|
||||
.dropped_txs_per_account_data_total_limit_count
|
||||
.fetch_add(
|
||||
batched_transaction_details
|
||||
.errors
|
||||
.batched_dropped_txs_per_account_data_total_limit_count,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
}
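The accumulation above reads batched_transaction_details.costs.* and .errors.*; a sketch of the shape those accesses imply, with every field name taken from this diff (the actual BatchedTransactionDetails definition lives in banking_stage.rs and is not shown in this excerpt):

    #[derive(Default)]
    struct BatchedTransactionCostDetails {
        batched_signature_cost: u64,
        batched_write_lock_cost: u64,
        batched_data_bytes_cost: u64,
        batched_execute_cost: u64,
    }

    #[derive(Default)]
    struct BatchedTransactionErrorDetails {
        batched_retried_txs_per_block_limit_count: u64,
        batched_retried_txs_per_vote_limit_count: u64,
        batched_retried_txs_per_account_limit_count: u64,
        batched_retried_txs_per_account_data_block_limit_count: u64,
        batched_dropped_txs_per_account_data_total_limit_count: u64,
    }

    #[derive(Default)]
    struct BatchedTransactionDetails {
        costs: BatchedTransactionCostDetails,
        errors: BatchedTransactionErrorDetails,
    }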
|
||||
|
||||
pub fn accumulate_actual_execute_cu(&self, units: u64) {
|
||||
self.metrics
|
||||
.stats
|
||||
.actual_execute_cu
|
||||
.fetch_add(units, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
pub fn accumulate_actual_execute_time(&self, micro_sec: u64) {
|
||||
self.metrics
|
||||
.stats
|
||||
.actual_execute_time_us
|
||||
.fetch_add(micro_sec, Ordering::Relaxed);
|
||||
}
|
||||
@@ -223,63 +277,77 @@ impl QosService {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
#[derive(Debug, Default)]
|
||||
struct QosServiceMetrics {
|
||||
// banking_stage creates one QosService instance per working threads, that is uniquely
|
||||
// identified by id. This field allows to categorize metrics for gossip votes, TPU votes
|
||||
// and other transactions.
|
||||
/// banking_stage creates one QosService instance per working threads, that is uniquely
|
||||
/// identified by id. This field allows to categorize metrics for gossip votes, TPU votes
|
||||
/// and other transactions.
|
||||
id: u32,
|
||||
|
||||
// aggregate metrics per slot
|
||||
/// aggregate metrics per slot
|
||||
slot: AtomicU64,
|
||||
|
||||
// accumulated time in micro-sec spent in computing transaction cost. It is the main performance
|
||||
// overhead introduced by cost_model
|
||||
stats: QosServiceMetricsStats,
|
||||
errors: QosServiceMetricsErrors,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct QosServiceMetricsStats {
|
||||
/// accumulated time in micro-sec spent in computing transaction cost. It is the main performance
|
||||
/// overhead introduced by cost_model
|
||||
compute_cost_time: AtomicU64,
|
||||
|
||||
// total number of transactions in the reporting period to be computed for their cost. It is
// usually the number of sanitized transactions leader receives.
/// total number of transactions in the reporting period to be computed for their cost. It is
/// usually the number of sanitized transactions leader receives.
|
||||
compute_cost_count: AtomicU64,
|
||||
|
||||
// accumulated time in micro-sec spent in tracking each bank's cost. It is the second part of
// overhead introduced
/// accumulated time in micro-sec spent in tracking each bank's cost. It is the second part of
/// overhead introduced
|
||||
cost_tracking_time: AtomicU64,
|
||||
|
||||
// number of transactions to be included in blocks
|
||||
/// number of transactions to be included in blocks
|
||||
selected_txs_count: AtomicU64,
|
||||
|
||||
// number of transactions to be queued for retry due to its potential to breach block limit
|
||||
retried_txs_per_block_limit_count: AtomicU64,
|
||||
|
||||
// number of transactions to be queued for retry due to its potential to breach vote limit
|
||||
retried_txs_per_vote_limit_count: AtomicU64,
|
||||
|
||||
// number of transactions to be queued for retry due to its potential to breach writable
|
||||
// account limit
|
||||
retried_txs_per_account_limit_count: AtomicU64,
|
||||
|
||||
// number of transactions to be queued for retry due to its account data limits
|
||||
retried_txs_per_account_data_limit_count: AtomicU64,
|
||||
|
||||
// accumulated estimated signature Compute Unites to be packed into block
|
||||
/// accumulated estimated signature Compute Unites to be packed into block
|
||||
estimated_signature_cu: AtomicU64,
|
||||
|
||||
// accumulated estimated write locks Compute Units to be packed into block
|
||||
/// accumulated estimated write locks Compute Units to be packed into block
|
||||
estimated_write_lock_cu: AtomicU64,
|
||||
|
||||
// accumulated estimated instruction data Compute Units to be packed into block
/// accumulated estimated instruction data Compute Units to be packed into block
|
||||
estimated_data_bytes_cu: AtomicU64,
|
||||
|
||||
// accumulated estimated program Compute Units to be packed into block
|
||||
/// accumulated estimated program Compute Units to be packed into block
|
||||
estimated_execute_cu: AtomicU64,
|
||||
|
||||
// accumulated actual program Compute Units that have been packed into block
|
||||
/// accumulated actual program Compute Units that have been packed into block
|
||||
actual_execute_cu: AtomicU64,
|
||||
|
||||
// accumulated actual program execute micro-sec that have been packed into block
|
||||
/// accumulated actual program execute micro-sec that have been packed into block
|
||||
actual_execute_time_us: AtomicU64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct QosServiceMetricsErrors {
|
||||
/// number of transactions to be queued for retry due to their potential to breach block limit
|
||||
retried_txs_per_block_limit_count: AtomicU64,
|
||||
|
||||
/// number of transactions to be queued for retry due to their potential to breach vote limit
|
||||
retried_txs_per_vote_limit_count: AtomicU64,
|
||||
|
||||
/// number of transactions to be queued for retry due to their potential to breach writable
|
||||
/// account limit
|
||||
retried_txs_per_account_limit_count: AtomicU64,
|
||||
|
||||
/// number of transactions to be queued for retry due to their potential to breach account data
|
||||
/// block limits
|
||||
retried_txs_per_account_data_block_limit_count: AtomicU64,
|
||||
|
||||
/// number of transactions to be dropped due to their potential to breach account data total
|
||||
/// limits
|
||||
dropped_txs_per_account_data_total_limit_count: AtomicU64,
|
||||
}
|
||||
|
||||
impl QosServiceMetrics {
|
||||
pub fn new(id: u32) -> Self {
|
||||
QosServiceMetrics {
|
||||
@@ -296,76 +364,96 @@ impl QosServiceMetrics {
|
||||
("bank_slot", bank_slot as i64, i64),
|
||||
(
|
||||
"compute_cost_time",
|
||||
self.compute_cost_time.swap(0, Ordering::Relaxed) as i64,
|
||||
self.stats.compute_cost_time.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"compute_cost_count",
|
||||
self.compute_cost_count.swap(0, Ordering::Relaxed) as i64,
|
||||
self.stats.compute_cost_count.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"cost_tracking_time",
|
||||
self.cost_tracking_time.swap(0, Ordering::Relaxed) as i64,
|
||||
self.stats.cost_tracking_time.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"selected_txs_count",
|
||||
self.selected_txs_count.swap(0, Ordering::Relaxed) as i64,
|
||||
self.stats.selected_txs_count.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_signature_cu",
|
||||
self.stats.estimated_signature_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_write_lock_cu",
|
||||
self.stats
|
||||
.estimated_write_lock_cu
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_data_bytes_cu",
|
||||
self.stats
|
||||
.estimated_data_bytes_cu
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_execute_cu",
|
||||
self.stats.estimated_execute_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"actual_execute_cu",
|
||||
self.stats.actual_execute_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"actual_execute_time_us",
|
||||
self.stats.actual_execute_time_us.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
);
|
||||
datapoint_info!(
|
||||
"qos-service-errors",
|
||||
("id", self.id as i64, i64),
|
||||
("bank_slot", bank_slot as i64, i64),
|
||||
(
|
||||
"retried_txs_per_block_limit_count",
|
||||
self.retried_txs_per_block_limit_count
|
||||
self.errors
|
||||
.retried_txs_per_block_limit_count
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"retried_txs_per_vote_limit_count",
|
||||
self.retried_txs_per_vote_limit_count
|
||||
self.errors
|
||||
.retried_txs_per_vote_limit_count
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"retried_txs_per_account_limit_count",
|
||||
self.retried_txs_per_account_limit_count
|
||||
self.errors
|
||||
.retried_txs_per_account_limit_count
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"retried_txs_per_account_data_limit_count",
|
||||
self.retried_txs_per_account_data_limit_count
|
||||
"retried_txs_per_account_data_block_limit_count",
|
||||
self.errors
|
||||
.retried_txs_per_account_data_block_limit_count
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_signature_cu",
|
||||
self.estimated_signature_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_write_lock_cu",
|
||||
self.estimated_write_lock_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_data_bytes_cu",
|
||||
self.estimated_data_bytes_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"estimated_execute_cu",
|
||||
self.estimated_execute_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"actual_execute_cu",
|
||||
self.actual_execute_cu.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
(
|
||||
"actual_execute_time_us",
|
||||
self.actual_execute_time_us.swap(0, Ordering::Relaxed) as i64,
|
||||
"dropped_txs_per_account_data_total_limit_count",
|
||||
self.errors
|
||||
.dropped_txs_per_account_data_total_limit_count
|
||||
.swap(0, Ordering::Relaxed) as i64,
|
||||
i64
|
||||
),
|
||||
);
|
||||
|
@@ -735,11 +735,16 @@ impl ReplayStage {
|
||||
restored_tower.adjust_lockouts_after_replay(root_bank.slot(), &slot_history)
|
||||
}).
|
||||
unwrap_or_else(|err| {
|
||||
// It's a fatal error if the tower is not present. This is
|
||||
// necessary to prevent the validator from violating
|
||||
// lockouts for its new identity
|
||||
error!("Failed to load tower for {}: {}", my_pubkey, err);
|
||||
std::process::exit(1);
|
||||
if err.is_file_missing() {
|
||||
Tower::new_from_bankforks(
|
||||
&bank_forks.read().unwrap(),
|
||||
&my_pubkey,
|
||||
&vote_account,
|
||||
)
|
||||
} else {
|
||||
error!("Failed to load tower for {}: {}", my_pubkey, err);
|
||||
std::process::exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
// Ensure the validator can land votes with the new identity before
|
||||
@@ -3129,7 +3134,7 @@ pub mod tests {
|
||||
},
|
||||
solana_rpc::{
|
||||
optimistically_confirmed_bank_tracker::OptimisticallyConfirmedBank,
|
||||
rpc::create_test_transactions_and_populate_blockstore,
|
||||
rpc::{create_test_transaction_entries, populate_blockstore_for_tests},
|
||||
},
|
||||
solana_runtime::{
|
||||
accounts_background_service::AbsRequestSender,
|
||||
@@ -3998,15 +4003,18 @@ pub mod tests {
|
||||
let bank1 = Arc::new(Bank::new_from_parent(&bank0, &Pubkey::default(), 1));
|
||||
let slot = bank1.slot();
|
||||
|
||||
let mut test_signatures_iter = create_test_transactions_and_populate_blockstore(
|
||||
let (entries, test_signatures) = create_test_transaction_entries(
|
||||
vec![&mint_keypair, &keypair1, &keypair2, &keypair3],
|
||||
bank0.slot(),
|
||||
bank1.clone(),
|
||||
);
|
||||
populate_blockstore_for_tests(
|
||||
entries,
|
||||
bank1,
|
||||
blockstore.clone(),
|
||||
Arc::new(AtomicU64::default()),
|
||||
)
|
||||
.into_iter();
|
||||
);
|
||||
|
||||
let mut test_signatures_iter = test_signatures.into_iter();
|
||||
let confirmed_block = blockstore.get_rooted_block(slot, false).unwrap();
|
||||
let actual_tx_results: Vec<_> = confirmed_block
|
||||
.transactions
|
||||
|
@@ -416,7 +416,7 @@ pub fn retransmitter(
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub(crate) struct RetransmitStage {
|
||||
pub struct RetransmitStage {
|
||||
retransmit_thread_handle: JoinHandle<()>,
|
||||
window_service: WindowService,
|
||||
cluster_slots_service: ClusterSlotsService,
|
||||
|
@@ -2,6 +2,7 @@ use {
    crate::{
        cluster_slots::ClusterSlots,
        duplicate_repair_status::ANCESTOR_HASH_REPAIR_SAMPLE_SIZE,
        packet_threshold::DynamicPacketToProcessThreshold,
        repair_response,
        repair_service::{OutstandingShredRepairs, RepairStats},
        request_response::RequestResponse,
@@ -23,7 +24,6 @@ use {
        blockstore::Blockstore,
        shred::{Nonce, Shred, SIZE_OF_NONCE},
    },
    solana_measure::measure::Measure,
    solana_metrics::inc_new_counter_debug,
    solana_perf::packet::{limited_deserialize, PacketBatch, PacketBatchRecycler},
    solana_sdk::{
@@ -322,7 +322,7 @@ impl ServeRepair {
        requests_receiver: &PacketBatchReceiver,
        response_sender: &PacketBatchSender,
        stats: &mut ServeRepairStats,
        max_packets: &mut usize,
        packet_threshold: &mut DynamicPacketToProcessThreshold,
    ) -> Result<()> {
        //TODO cache connections
        let timeout = Duration::new(1, 0);
@@ -332,29 +332,21 @@ impl ServeRepair {
        let mut dropped_packets = 0;
        while let Ok(more) = requests_receiver.try_recv() {
            total_packets += more.packets.len();
            if total_packets < *max_packets {
                // Drop the rest in the channel in case of dos
                reqs_v.push(more);
            } else {
            if packet_threshold.should_drop(total_packets) {
                dropped_packets += more.packets.len();
            } else {
                reqs_v.push(more);
            }
        }

        stats.dropped_packets += dropped_packets;
        stats.total_packets += total_packets;

        let mut time = Measure::start("repair::handle_packets");
        let timer = Instant::now();
        for reqs in reqs_v {
            Self::handle_packets(obj, recycler, blockstore, reqs, response_sender, stats);
        }
        time.stop();
        if total_packets >= *max_packets {
            if time.as_ms() > 1000 {
                *max_packets = (*max_packets * 9) / 10;
            } else {
                *max_packets = (*max_packets * 10) / 9;
            }
        }
        packet_threshold.update(total_packets, timer.elapsed());
        Ok(())
    }

@@ -403,7 +395,7 @@ impl ServeRepair {
            .spawn(move || {
                let mut last_print = Instant::now();
                let mut stats = ServeRepairStats::default();
                let mut max_packets = 1024;
                let mut packet_threshold = DynamicPacketToProcessThreshold::default();
                loop {
                    let result = Self::run_listen(
                        &me,
@@ -412,7 +404,7 @@ impl ServeRepair {
                        &requests_receiver,
                        &response_sender,
                        &mut stats,
                        &mut max_packets,
                        &mut packet_threshold,
                    );
                    match result {
                        Err(Error::RecvTimeout(_)) | Ok(_) => {}
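The serve-repair hunks replace the hand-rolled `max_packets` adjustment (shrink by 9/10 when a batch took over a second, grow by 10/9 otherwise) with a `DynamicPacketToProcessThreshold`. The sketch below shows one way such an adaptive threshold can work; the struct name, fields, and constants are illustrative assumptions, not the crate's actual implementation:

```rust
use std::time::Duration;

/// Illustrative adaptive threshold: shrink when processing ran long,
/// grow when it finished quickly, mirroring the removed 9/10 vs 10/9 logic.
struct AdaptivePacketThreshold {
    max_packets: usize,
    time_budget: Duration,
}

impl Default for AdaptivePacketThreshold {
    fn default() -> Self {
        Self {
            max_packets: 1024,
            time_budget: Duration::from_millis(1000),
        }
    }
}

impl AdaptivePacketThreshold {
    /// Drop any packets received once the current budget is exceeded.
    fn should_drop(&self, total_packets: usize) -> bool {
        total_packets >= self.max_packets
    }

    /// Adjust the budget based on how long the last batch took to handle.
    fn update(&mut self, total_packets: usize, elapsed: Duration) {
        if total_packets < self.max_packets {
            return; // the limit was never reached, nothing to learn
        }
        if elapsed > self.time_budget {
            self.max_packets = (self.max_packets * 9) / 10;
        } else {
            self.max_packets = (self.max_packets * 10) / 9;
        }
    }
}
```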
@@ -219,7 +219,7 @@ mod tests {
        snapshot_archive_info::SnapshotArchiveInfo,
        snapshot_package::{SnapshotPackage, SnapshotType},
        snapshot_utils::{
            self, ArchiveFormat, SnapshotVersion, SNAPSHOT_STATUS_CACHE_FILE_NAME,
            self, ArchiveFormat, SnapshotVersion, SNAPSHOT_STATUS_CACHE_FILENAME,
        },
    },
    solana_sdk::hash::Hash,
@@ -335,7 +335,7 @@ mod tests {
        // the source dir for snapshots
        let dummy_slot_deltas: Vec<BankSlotDelta> = vec![];
        snapshot_utils::serialize_snapshot_data_file(
            &snapshots_dir.join(SNAPSHOT_STATUS_CACHE_FILE_NAME),
            &snapshots_dir.join(SNAPSHOT_STATUS_CACHE_FILENAME),
            |stream| {
                serialize_into(stream, &dummy_slot_deltas)?;
                Ok(())
@@ -36,6 +36,9 @@ use {

pub const DEFAULT_TPU_COALESCE_MS: u64 = 5;

// allow multiple connections for NAT and any open/close overlap
pub const MAX_QUIC_CONNECTIONS_PER_IP: usize = 8;

pub struct TpuSockets {
    pub transactions: Vec<UdpSocket>,
    pub transaction_forwards: Vec<UdpSocket>,
@@ -108,6 +111,7 @@ impl Tpu {
            cluster_info.my_contact_info().tpu.ip(),
            packet_sender,
            exit.clone(),
            MAX_QUIC_CONNECTIONS_PER_IP,
        )
        .unwrap();

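The new `MAX_QUIC_CONNECTIONS_PER_IP` constant caps how many concurrent QUIC connections a single peer IP may hold, leaving headroom for NAT'd clients and for open/close overlap. A minimal sketch of such a per-IP cap using only the standard library (the `ConnectionTable` type and its handling are illustrative assumptions, not the streamer's actual code):

```rust
use std::{collections::HashMap, net::IpAddr};

const MAX_QUIC_CONNECTIONS_PER_IP: usize = 8;

/// Illustrative per-IP connection accounting.
#[derive(Default)]
struct ConnectionTable {
    connections_per_ip: HashMap<IpAddr, usize>,
}

impl ConnectionTable {
    /// Returns true if a new connection from `ip` may be accepted.
    fn try_add(&mut self, ip: IpAddr) -> bool {
        let count = self.connections_per_ip.entry(ip).or_insert(0);
        if *count >= MAX_QUIC_CONNECTIONS_PER_IP {
            return false; // at the cap: reject; NAT'd peers already share 8 slots
        }
        *count += 1;
        true
    }

    /// Release one slot when a connection closes.
    fn remove(&mut self, ip: IpAddr) {
        if let Some(count) = self.connections_per_ip.get_mut(&ip) {
            *count = count.saturating_sub(1);
        }
    }
}
```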
@@ -16,6 +16,7 @@ use {
        cost_update_service::CostUpdateService,
        drop_bank_service::DropBankService,
        ledger_cleanup_service::LedgerCleanupService,
        ledger_metric_report_service::LedgerMetricReportService,
        replay_stage::{ReplayStage, ReplayStageConfig},
        retransmit_stage::RetransmitStage,
        rewards_recorder_service::RewardsRecorderSender,
@@ -70,6 +71,7 @@ pub struct Tvu {
    retransmit_stage: RetransmitStage,
    replay_stage: ReplayStage,
    ledger_cleanup_service: Option<LedgerCleanupService>,
    ledger_metric_report_service: LedgerMetricReportService,
    accounts_background_service: AccountsBackgroundService,
    accounts_hash_verifier: AccountsHashVerifier,
    cost_update_service: CostUpdateService,
@@ -307,8 +309,12 @@ impl Tvu {
        );

        let (cost_update_sender, cost_update_receiver) = unbounded();
        let cost_update_service =
            CostUpdateService::new(blockstore.clone(), cost_model.clone(), cost_update_receiver);
        let cost_update_service = CostUpdateService::new(
            exit.clone(),
            blockstore.clone(),
            cost_model.clone(),
            cost_update_receiver,
        );

        let (drop_bank_sender, drop_bank_receiver) = unbounded();

@@ -357,6 +363,8 @@ impl Tvu {
            )
        });

        let ledger_metric_report_service = LedgerMetricReportService::new(blockstore, exit);

        let accounts_background_service = AccountsBackgroundService::new(
            bank_forks.clone(),
            exit,
@@ -373,6 +381,7 @@ impl Tvu {
            retransmit_stage,
            replay_stage,
            ledger_cleanup_service,
            ledger_metric_report_service,
            accounts_background_service,
            accounts_hash_verifier,
            cost_update_service,
@@ -389,6 +398,7 @@ impl Tvu {
        if self.ledger_cleanup_service.is_some() {
            self.ledger_cleanup_service.unwrap().join()?;
        }
        self.ledger_metric_report_service.join()?;
        self.accounts_background_service.join()?;
        self.replay_stage.join()?;
        self.accounts_hash_verifier.join()?;
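`CostUpdateService::new` now also receives the shared `exit` flag, the same shutdown pattern the other Tvu services use to stop their worker threads and make `join` return. A minimal, self-contained sketch of that pattern (the service name and loop body are illustrative, not the actual CostUpdateService internals):

```rust
use std::{
    sync::{
        atomic::{AtomicBool, Ordering},
        Arc,
    },
    thread::{self, JoinHandle},
    time::Duration,
};

/// Illustrative service whose worker thread polls until the shared exit flag is set.
struct ExampleService {
    thread_hdl: JoinHandle<()>,
}

impl ExampleService {
    fn new(exit: Arc<AtomicBool>) -> Self {
        let thread_hdl = thread::Builder::new()
            .name("example-service".to_string())
            .spawn(move || {
                while !exit.load(Ordering::Relaxed) {
                    // do one unit of work, then sleep briefly
                    thread::sleep(Duration::from_millis(100));
                }
            })
            .unwrap();
        Self { thread_hdl }
    }

    /// Blocks until the worker thread observes the exit flag and returns.
    fn join(self) -> thread::Result<()> {
        self.thread_hdl.join()
    }
}
```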
131 core/src/unprocessed_packet_batches.rs Normal file
@@ -0,0 +1,131 @@
use {
    solana_perf::packet::{limited_deserialize, Packet, PacketBatch},
    solana_sdk::{
        hash::Hash, message::Message, short_vec::decode_shortu16_len, signature::Signature,
        transaction::VersionedTransaction,
    },
    std::{
        collections::{HashMap, VecDeque},
        mem::size_of,
    },
};

pub type UnprocessedPacketBatches = VecDeque<DeserializedPacketBatch>;

/// hold deserialized messages, as well as computed message_hash and other things needed to create
/// SanitizedTransaction
#[derive(Debug, Default)]
pub struct DeserializedPacket {
    #[allow(dead_code)]
    versioned_transaction: VersionedTransaction,

    #[allow(dead_code)]
    message_hash: Hash,

    #[allow(dead_code)]
    is_simple_vote: bool,
}

#[derive(Debug, Default)]
pub struct DeserializedPacketBatch {
    pub packet_batch: PacketBatch,
    pub forwarded: bool,
    // indexes of valid packets in batch, and their corresponding deserialized_packet
    pub unprocessed_packets: HashMap<usize, DeserializedPacket>,
}

impl DeserializedPacketBatch {
    pub fn new(packet_batch: PacketBatch, packet_indexes: Vec<usize>, forwarded: bool) -> Self {
        let unprocessed_packets = Self::deserialize_packets(&packet_batch, &packet_indexes);
        Self {
            packet_batch,
            unprocessed_packets,
            forwarded,
        }
    }

    fn deserialize_packets(
        packet_batch: &PacketBatch,
        packet_indexes: &[usize],
    ) -> HashMap<usize, DeserializedPacket> {
        packet_indexes
            .iter()
            .filter_map(|packet_index| {
                let deserialized_packet =
                    Self::deserialize_packet(&packet_batch.packets[*packet_index])?;
                Some((*packet_index, deserialized_packet))
            })
            .collect()
    }

    fn deserialize_packet(packet: &Packet) -> Option<DeserializedPacket> {
        let versioned_transaction: VersionedTransaction =
            match limited_deserialize(&packet.data[0..packet.meta.size]) {
                Ok(tx) => tx,
                Err(_) => return None,
            };

        if let Some(message_bytes) = Self::packet_message(packet) {
            let message_hash = Message::hash_raw_message(message_bytes);
            let is_simple_vote = packet.meta.is_simple_vote_tx();
            Some(DeserializedPacket {
                versioned_transaction,
                message_hash,
                is_simple_vote,
            })
        } else {
            None
        }
    }

    /// Read the transaction message from packet data
    pub fn packet_message(packet: &Packet) -> Option<&[u8]> {
        let (sig_len, sig_size) = decode_shortu16_len(&packet.data).ok()?;
        let msg_start = sig_len
            .checked_mul(size_of::<Signature>())
            .and_then(|v| v.checked_add(sig_size))?;
        let msg_end = packet.meta.size;
        Some(&packet.data[msg_start..msg_end])
    }

    // Returns whether the given `PacketBatch` has any more remaining unprocessed
    // transactions
    pub fn update_buffered_packets_with_new_unprocessed(
        &mut self,
        _original_unprocessed_indexes: &[usize],
        new_unprocessed_indexes: &[usize],
    ) -> bool {
        let has_more_unprocessed_transactions = !new_unprocessed_indexes.is_empty();
        if has_more_unprocessed_transactions {
            self.unprocessed_packets
                .retain(|index, _| new_unprocessed_indexes.contains(index));
        } else {
            self.unprocessed_packets.clear();
        }

        has_more_unprocessed_transactions
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        solana_sdk::{signature::Keypair, system_transaction},
    };

    #[test]
    fn test_packet_message() {
        let keypair = Keypair::new();
        let pubkey = solana_sdk::pubkey::new_rand();
        let blockhash = Hash::new_unique();
        let transaction = system_transaction::transfer(&keypair, &pubkey, 1, blockhash);
        let packet = Packet::from_data(None, &transaction).unwrap();
        assert_eq!(
            DeserializedPacketBatch::packet_message(&packet)
                .unwrap()
                .to_vec(),
            transaction.message_data()
        );
    }
}
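`packet_message` above locates the serialized message by skipping the signature section: a compact-u16 signature count followed by `sig_len` fixed 64-byte signatures. A small, self-contained sketch of that arithmetic, with a hand-rolled compact-u16 decoder standing in for `decode_shortu16_len` purely for illustration:

```rust
/// Illustrative decode of Solana's compact-u16 length prefix: up to three
/// bytes, seven value bits per byte, high bit set on all but the last byte.
/// Returns (decoded value, bytes consumed).
fn decode_compact_u16(bytes: &[u8]) -> Option<(usize, usize)> {
    let mut value: usize = 0;
    for (i, byte) in bytes.iter().take(3).enumerate() {
        value |= ((byte & 0x7f) as usize) << (7 * i);
        if byte & 0x80 == 0 {
            return Some((value, i + 1));
        }
    }
    None
}

const SIGNATURE_BYTES: usize = 64; // size_of::<Signature>()

/// Offset of the message inside a serialized transaction packet:
/// skip the length prefix plus `sig_len` fixed-size signatures.
fn message_offset(packet_data: &[u8]) -> Option<usize> {
    let (sig_len, prefix_size) = decode_compact_u16(packet_data)?;
    sig_len
        .checked_mul(SIGNATURE_BYTES)
        .and_then(|v| v.checked_add(prefix_size))
}

fn main() {
    // A single-signature transaction: prefix byte 0x01, then one 64-byte signature.
    let mut data = vec![0x01u8];
    data.extend_from_slice(&[0u8; SIGNATURE_BYTES]);
    data.extend_from_slice(b"message bytes...");
    assert_eq!(message_offset(&data), Some(1 + 64));
}
```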
@@ -36,7 +36,7 @@ use {
|
||||
solana_ledger::{
|
||||
bank_forks_utils,
|
||||
blockstore::{Blockstore, BlockstoreSignals, CompletedSlotsReceiver, PurgeType},
|
||||
blockstore_db::{BlockstoreOptions, BlockstoreRecoveryMode},
|
||||
blockstore_db::{BlockstoreAdvancedOptions, BlockstoreOptions, BlockstoreRecoveryMode},
|
||||
blockstore_processor::{self, TransactionStatusSender},
|
||||
leader_schedule::FixedSchedule,
|
||||
leader_schedule_cache::LeaderScheduleCache,
|
||||
@@ -165,6 +165,7 @@ pub struct ValidatorConfig {
|
||||
pub no_wait_for_vote_to_start_leader: bool,
|
||||
pub accounts_shrink_ratio: AccountShrinkThreshold,
|
||||
pub wait_to_vote_slot: Option<Slot>,
|
||||
pub blockstore_advanced_options: BlockstoreAdvancedOptions,
|
||||
}
|
||||
|
||||
impl Default for ValidatorConfig {
|
||||
@@ -225,6 +226,7 @@ impl Default for ValidatorConfig {
|
||||
accounts_shrink_ratio: AccountShrinkThreshold::default(),
|
||||
accounts_db_config: None,
|
||||
wait_to_vote_slot: None,
|
||||
blockstore_advanced_options: BlockstoreAdvancedOptions::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -809,7 +811,8 @@ impl Validator {
|
||||
|
||||
let vote_tracker = Arc::<VoteTracker>::default();
|
||||
let mut cost_model = CostModel::default();
|
||||
cost_model.initialize_cost_table(&blockstore.read_program_costs().unwrap());
|
||||
// initialize cost model with built-in instruction costs only
|
||||
cost_model.initialize_cost_table(&[]);
|
||||
let cost_model = Arc::new(RwLock::new(cost_model));
|
||||
|
||||
let (retransmit_slots_sender, retransmit_slots_receiver) = unbounded();
|
||||
@@ -1256,6 +1259,7 @@ fn new_banks_from_ledger(
|
||||
BlockstoreOptions {
|
||||
recovery_mode: config.wal_recovery_mode.clone(),
|
||||
enforce_ulimit_nofile,
|
||||
advanced_options: config.blockstore_advanced_options.clone(),
|
||||
..BlockstoreOptions::default()
|
||||
},
|
||||
)
|
||||
|
@@ -1,13 +1,13 @@
|
||||
//! Fork Selection Simulation
|
||||
//!
|
||||
//! Description of the algorithm can be found in [docs/src/fork-selection.md](docs/src/fork-selection.md).
|
||||
//! Description of the algorithm can be found in [docs/src/cluster/managing-forks.md](docs/src/cluster/managing-forks.md).
|
||||
//!
|
||||
//! A test library function exists for configuring networks.
|
||||
//! ```
|
||||
//! /// * num_partitions - 1 to 100 partitions
|
||||
//! /// * fail_rate - 0 to 1.0 rate of packet receive failure
|
||||
//! /// * delay_count - number of forks to observe before voting
|
||||
//! /// * parasite_rate - number of parasite nodes that vote opposite the greedy choice
|
||||
//! /// * parasite_rate - percentage of parasite nodes that vote opposite the greedy choice
|
||||
//! fn test_with_partitions(num_partitions: usize, fail_rate: f64, delay_count: usize, parasite_rate: f64);
|
||||
//! ```
|
||||
//! Modify the test function
|
||||
@@ -497,7 +497,7 @@ fn test_no_partitions() {
|
||||
/// * num_partitions - 1 to 100 partitions
|
||||
/// * fail_rate - 0 to 1.0 rate of packet receive failure
|
||||
/// * delay_count - number of forks to observe before voting
|
||||
/// * parasite_rate - number of parasite nodes that vote opposite the greedy choice
|
||||
/// * parasite_rate - percentage of parasite nodes that vote opposite the greedy choice
|
||||
fn test_with_partitions(
|
||||
num_partitions: usize,
|
||||
fail_rate: f64,
|
||||
|
@@ -9,7 +9,10 @@ mod tests {
|
||||
solana_core::ledger_cleanup_service::LedgerCleanupService,
|
||||
solana_ledger::{
|
||||
blockstore::{make_many_slot_shreds, Blockstore},
|
||||
blockstore_db::{BlockstoreOptions, BlockstoreRocksFifoOptions, ShredStorageType},
|
||||
blockstore_db::{
|
||||
BlockstoreAdvancedOptions, BlockstoreOptions, BlockstoreRocksFifoOptions,
|
||||
ShredStorageType,
|
||||
},
|
||||
get_tmp_ledger_path,
|
||||
},
|
||||
solana_measure::measure::Measure,
|
||||
@@ -294,7 +297,7 @@ mod tests {
|
||||
|
||||
*time_previous = time_now;
|
||||
*storage_previous = storage_now;
|
||||
*data_shred_storage_previous = data_shred_storage_now;
|
||||
*data_shred_storage_previous = data_shred_storage_now.try_into().unwrap();
|
||||
}
|
||||
|
||||
/// Helper function of the benchmark `test_ledger_cleanup_compaction` which
|
||||
@@ -348,10 +351,14 @@ mod tests {
|
||||
&ledger_path,
|
||||
if config.fifo_compaction {
|
||||
BlockstoreOptions {
|
||||
shred_storage_type: ShredStorageType::RocksFifo(BlockstoreRocksFifoOptions {
|
||||
shred_data_cf_size: config.shred_data_cf_size,
|
||||
..BlockstoreRocksFifoOptions::default()
|
||||
}),
|
||||
advanced_options: BlockstoreAdvancedOptions {
|
||||
shred_storage_type: ShredStorageType::RocksFifo(
|
||||
BlockstoreRocksFifoOptions {
|
||||
shred_data_cf_size: config.shred_data_cf_size,
|
||||
..BlockstoreRocksFifoOptions::default()
|
||||
},
|
||||
),
|
||||
},
|
||||
..BlockstoreOptions::default()
|
||||
}
|
||||
} else {
|
||||
|
@@ -175,7 +175,7 @@ mod tests {
|
||||
|
||||
let check_hash_calculation = false;
|
||||
let full_snapshot_archive_path = snapshot_utils::build_full_snapshot_archive_path(
|
||||
snapshot_archives_dir.to_path_buf(),
|
||||
snapshot_archives_dir,
|
||||
old_last_bank.slot(),
|
||||
&old_last_bank.get_accounts_hash(),
|
||||
ArchiveFormat::TarBzip2,
|
||||
@@ -432,7 +432,7 @@ mod tests {
|
||||
// Only save off the files returned by `get_snapshot_storages`. This is because
|
||||
// some of the storage entries in the accounts directory may be filtered out by
|
||||
// `get_snapshot_storages()` and will not be included in the snapshot. Ultimately,
|
||||
// this means copying naitvely everything in `accounts_dir` to the `saved_accounts_dir`
|
||||
// this means copying natively everything in `accounts_dir` to the `saved_accounts_dir`
|
||||
// will lead to test failure by mismatch when `saved_accounts_dir` is compared to
|
||||
// the unpacked snapshot later in this test's call to `verify_snapshot_archive()`.
|
||||
for file in snapshot_storage_files {
|
||||
@@ -461,7 +461,7 @@ mod tests {
|
||||
fs_extra::dir::copy(&last_snapshot_path, &saved_snapshots_dir, &options).unwrap();
|
||||
|
||||
saved_archive_path = Some(snapshot_utils::build_full_snapshot_archive_path(
|
||||
snapshot_archives_dir.to_path_buf(),
|
||||
snapshot_archives_dir,
|
||||
slot,
|
||||
&accounts_hash,
|
||||
ArchiveFormat::TarBzip2,
|
||||
@@ -544,7 +544,7 @@ mod tests {
|
||||
snapshot_utils::serialize_snapshot_data_file(
|
||||
&saved_snapshots_dir
|
||||
.path()
|
||||
.join(snapshot_utils::SNAPSHOT_STATUS_CACHE_FILE_NAME),
|
||||
.join(snapshot_utils::SNAPSHOT_STATUS_CACHE_FILENAME),
|
||||
|stream| {
|
||||
serialize_into(stream, &[] as &[BankSlotDelta])?;
|
||||
Ok(())
|
||||
|
@@ -8,11 +8,17 @@ set -e
|
||||
cd "$(dirname "$0")"
|
||||
output_dir=static/img
|
||||
|
||||
svgbob_cli="$(command -v svgbob_cli || true)"
|
||||
if [[ -z "$svgbob_cli" ]]; then
|
||||
svgbob_cli="$(command -v svgbob || true)"
|
||||
[[ -n "$svgbob_cli" ]] || ( echo "svgbob_cli binary not found" && exit 1 )
|
||||
fi
|
||||
|
||||
mkdir -p "$output_dir"
|
||||
|
||||
while read -r bob_file; do
|
||||
out_file=$(basename "${bob_file%.*}".svg)
|
||||
svgbob "$bob_file" --output "$output_dir/$out_file"
|
||||
"$svgbob_cli" "$bob_file" --output "$output_dir/$out_file"
|
||||
done < <(find art/*.bob)
|
||||
|
||||
while read -r msc_file; do
|
||||
|
@@ -36,7 +36,7 @@ public RPC endpoints currently available and recommended for each public cluster

## Mainnet Beta

#### Endpoints
#### Endpoints*

- `https://api.mainnet-beta.solana.com` - Solana-hosted api node cluster, backed by a load balancer; rate-limited
- `https://solana-api.projectserum.com` - Project Serum-hosted api node
@@ -48,3 +48,17 @@ public RPC endpoints currently available and recommended for each public cluster
- Maximum concurrent connections per IP: 40
- Maximum connection rate per 10 seconds per IP: 40
- Maximum amount of data per 30 seconds: 100 MB

*The public RPC endpoints are not intended for production applications. Please
use dedicated/private RPC servers when you launch your application, drop NFTs,
etc. The public services are subject to abuse and rate limits may change
without prior notice. Likewise, high-traffic websites may be blocked without
prior notice.

## Common HTTP Error Codes

- 403 -- Your IP address or website has been blocked. It is time to run your own RPC server(s) or find a private service.
- 429 -- Your IP address is exceeding the rate limits. Slow down! Use the
  [Retry-After](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After)
  HTTP response header to determine how long to wait before making another
  request.
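The new guidance tells clients to honor the Retry-After header on 429 responses. A small sketch of the client-side backoff, using only the standard library; it assumes the header carries a delay in seconds (the HTTP-date form and the HTTP client itself are out of scope here):

```rust
use std::{thread, time::Duration};

/// Parse a `Retry-After: <seconds>` header value into a wait duration,
/// falling back to a default when the header is absent or not numeric.
fn retry_after_delay(header: Option<&str>, default: Duration) -> Duration {
    header
        .and_then(|value| value.trim().parse::<u64>().ok())
        .map(Duration::from_secs)
        .unwrap_or(default)
}

fn main() {
    // Pretend the RPC endpoint answered 429 with "Retry-After: 3".
    let delay = retry_after_delay(Some("3"), Duration::from_secs(1));
    println!("backing off for {:?} before retrying", delay);
    thread::sleep(delay);
}
```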
@@ -97,8 +97,9 @@ $ solana-validator \
|
||||
--identity validator-keypair.json \
|
||||
--vote-account vote-account-keypair.json \
|
||||
--known-validator 5D1fNXzvv5NjV1ysLjirC4WY92RNsVH18vjmcszZd8on \
|
||||
--known-validator 7XSY3MrYnK8vq693Rju17bbPkCN3Z7KvvfvJx4kdrsSY \
|
||||
--known-validator dDzy5SR3AXdYWVqbDEkVFdvSPCtS9ihF5kJkHCtXoFs \
|
||||
--known-validator Ft5fbkqNa76vnsjYNwjDZUXoTWpP7VYm3mtsaQckQADN \
|
||||
--known-validator eoKpUABi59aT4rR9HGS3LcMecfut9x7zJyodWWP43YQ \
|
||||
--known-validator 9QxCLckBiJc783jnMvXZubK4wH86Eqqvashtrwvcsgkv \
|
||||
--only-known-rpc \
|
||||
--ledger ledger \
|
||||
@@ -116,7 +117,9 @@ The identities of the
|
||||
[`--known-validator`s](running-validator/validator-start.md#known-validators) are:
|
||||
|
||||
- `5D1fNXzvv5NjV1ysLjirC4WY92RNsVH18vjmcszZd8on` - Solana Labs (testnet.solana.com)
|
||||
- `dDzy5SR3AXdYWVqbDEkVFdvSPCtS9ihF5kJkHCtXoFs` - MonkeDAO
|
||||
- `Ft5fbkqNa76vnsjYNwjDZUXoTWpP7VYm3mtsaQckQADN` - Certus One
|
||||
- `eoKpUABi59aT4rR9HGS3LcMecfut9x7zJyodWWP43YQ` - SerGo
|
||||
- `9QxCLckBiJc783jnMvXZubK4wH86Eqqvashtrwvcsgkv` - Algo|Stake
|
||||
|
||||
## Mainnet Beta
|
||||
|
@@ -389,6 +389,7 @@ Returns identity and transaction information about a confirmed block in the ledg
|
||||
- (optional) `transactionDetails: <string>` - level of transaction detail to return, either "full", "signatures", or "none". If parameter not provided, the default detail level is "full".
|
||||
- (optional) `rewards: bool` - whether to populate the `rewards` array. If parameter not provided, the default includes rewards.
|
||||
- (optional) [Commitment](jsonrpc-api.md#configuring-state-commitment); "processed" is not supported. If parameter not provided, the default is "finalized".
|
||||
- (optional) `maxSupportedTransactionVersion: <number>` - set the max transaction version to return in responses. If the requested block contains a transaction with a higher version, an error will be returned.
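For reference, opting into versioned transactions only requires adding the new parameter to the `getBlock` config object. A sketch that builds such a request body with `serde_json` (the slot number is a placeholder; all config fields shown are the ones documented above):

```rust
use serde_json::json;

fn main() {
    // JSON-RPC body for getBlock with the new maxSupportedTransactionVersion
    // parameter; 430 is a placeholder slot.
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getBlock",
        "params": [
            430,
            {
                "encoding": "json",
                "transactionDetails": "full",
                "rewards": false,
                "maxSupportedTransactionVersion": 0
            }
        ]
    });
    println!("{}", serde_json::to_string_pretty(&request).unwrap());
}
```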
|
||||
|
||||
#### Results:
|
||||
|
||||
@@ -413,6 +414,10 @@ The result field will be an object with the following fields:
|
||||
- DEPRECATED: `status: <object>` - Transaction status
|
||||
- `"Ok": <null>` - Transaction was successful
|
||||
- `"Err": <ERR>` - Transaction failed with TransactionError
|
||||
- `loadedAddresses: <object|undefined>` - Transaction addresses loaded from address lookup tables. Undefined if `maxSupportedTransactionVersion` is not set in request params.
|
||||
- `writable: <array[string]>` - Ordered list of base-58 encoded addresses for writable loaded accounts
|
||||
- `readonly: <array[string]>` - Ordered list of base-58 encoded addresses for readonly loaded accounts
|
||||
- `version: <"legacy"|number|undefined>` - Transaction version. Undefined if `maxSupportedTransactionVersion` is not set in request params.
|
||||
- `signatures: <array>` - present if "signatures" are requested for transaction details; an array of signatures strings, corresponding to the transaction order in the block
|
||||
- `rewards: <array>` - present if rewards are requested; an array of JSON objects containing:
|
||||
- `pubkey: <string>` - The public key, as base-58 encoded string, of the account that received the reward
|
||||
@@ -559,6 +564,10 @@ The JSON structure of a transaction is defined as follows:
|
||||
- `programIdIndex: <number>` - Index into the `message.accountKeys` array indicating the program account that executes this instruction.
|
||||
- `accounts: <array[number]>` - List of ordered indices into the `message.accountKeys` array indicating which accounts to pass to the program.
|
||||
- `data: <string>` - The program input data encoded in a base-58 string.
|
||||
- `addressTableLookups: <array[object]|undefined>` - List of address table lookups used by a transaction to dynamically load addresses from on-chain address lookup tables. Undefined if `maxSupportedTransactionVersion` is not set.
|
||||
- `accountKey: <string>` - base-58 encoded public key for an address lookup table account.
|
||||
- `writableIndexes: <array[number]>` - List of indices used to load addresses of writable accounts from a lookup table.
|
||||
- `readonlyIndexes: <array[number]>` - List of indices used to load addresses of readonly accounts from a lookup table.
|
||||
|
||||
#### Inner Instructions Structure
|
||||
|
||||
@@ -2313,7 +2322,7 @@ Returns the slot leaders for a given slot range
|
||||
|
||||
#### Results:
|
||||
|
||||
- `<array<string>>` - Node identity public keys as base-58 encoded strings
|
||||
- `<array[string]>` - Node identity public keys as base-58 encoded strings
|
||||
|
||||
#### Example:
|
||||
|
||||
@@ -2847,6 +2856,7 @@ Returns transaction details for a confirmed transaction
|
||||
- (optional) `encoding: <string>` - encoding for each returned Transaction, either "json", "jsonParsed", "base58" (_slow_), "base64". If parameter not provided, the default encoding is "json".
|
||||
"jsonParsed" encoding attempts to use program-specific instruction parsers to return more human-readable and explicit data in the `transaction.message.instructions` list. If "jsonParsed" is requested but a parser cannot be found, the instruction falls back to regular JSON encoding (`accounts`, `data`, and `programIdIndex` fields).
|
||||
- (optional) [Commitment](jsonrpc-api.md#configuring-state-commitment); "processed" is not supported. If parameter not provided, the default is "finalized".
|
||||
- (optional) `maxSupportedTransactionVersion: <number>` - set the max transaction version to return in responses. If the requested transaction is a higher version, an error will be returned.
|
||||
|
||||
#### Results:
|
||||
|
||||
@@ -2873,6 +2883,10 @@ Returns transaction details for a confirmed transaction
|
||||
- `postBalance: <u64>` - account balance in lamports after the reward was applied
|
||||
- `rewardType: <string>` - type of reward: currently only "rent", other types may be added in the future
|
||||
- `commission: <u8|undefined>` - vote account commission when the reward was credited, only present for voting and staking rewards
|
||||
- `loadedAddresses: <object|undefined>` - Transaction addresses loaded from address lookup tables. Undefined if `maxSupportedTransactionVersion` is not set in request params.
|
||||
- `writable: <array[string]>` - Ordered list of base-58 encoded addresses for writable loaded accounts
|
||||
- `readonly: <array[string]>` - Ordered list of base-58 encoded addresses for readonly loaded accounts
|
||||
- `version: <"legacy"|number|undefined>` - Transaction version. Undefined if `maxSupportedTransactionVersion` is not set in request params.
|
||||
|
||||
#### Example:
|
||||
|
||||
@@ -3045,7 +3059,7 @@ curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d '
|
||||
Result:
|
||||
|
||||
```json
|
||||
{ "jsonrpc": "2.0", "result": { "solana-core": "1.10.0" }, "id": 1 }
|
||||
{ "jsonrpc": "2.0", "result": { "solana-core": "1.10.2" }, "id": 1 }
|
||||
```
|
||||
|
||||
### getVoteAccounts
|
||||
|
@@ -21,7 +21,7 @@ Create new accounts, allocate account data, assign accounts to owning programs,
|
||||
transfer lamports from System Program owned accounts and pay transaction fees.
|
||||
|
||||
- Program id: `11111111111111111111111111111111`
|
||||
- Instructions: [SystemInstruction](https://docs.rs/solana-sdk/VERSION_FOR_DOCS_RS/solana_sdk/system_instruction/enum.SystemInstruction.html)
|
||||
- Instructions: [SystemInstruction](https://docs.rs/solana-program/VERSION_FOR_DOCS_RS/solana_program/system_instruction/enum.SystemInstruction.html)
|
||||
|
||||
## Config Program
|
||||
|
||||
|
@@ -58,7 +58,7 @@ To sign a transaction offline, pass the following arguments on the command line
|
||||
Command
|
||||
|
||||
```bash
|
||||
solana@offline$ solana pay --sign-only --blockhash 5Tx8F3jgSHx21CbtjwmdaKPLM5tWmreWAnPrbqHomSJF \
|
||||
solana@offline$ solana transfer --sign-only --blockhash 5Tx8F3jgSHx21CbtjwmdaKPLM5tWmreWAnPrbqHomSJF \
|
||||
recipient-keypair.json 1
|
||||
```
|
||||
|
||||
@@ -88,7 +88,7 @@ following arguments on the command line
|
||||
Command
|
||||
|
||||
```bash
|
||||
solana@online$ solana pay --blockhash 5Tx8F3jgSHx21CbtjwmdaKPLM5tWmreWAnPrbqHomSJF \
|
||||
solana@online$ solana transfer --blockhash 5Tx8F3jgSHx21CbtjwmdaKPLM5tWmreWAnPrbqHomSJF \
|
||||
--signer FhtzLVsmcV7S5XqGD79ErgoseCLhZYmEZnz9kQg1Rp7j=4vC38p4bz7XyiXrk6HtaooUqwxTWKocf45cstASGtmrD398biNJnmTcUCVEojE7wVQvgdYbjHJqRFZPpzfCQpmUN
|
||||
recipient-keypair.json 1
|
||||
```
|
||||
|
@@ -205,7 +205,7 @@ Alice attempts to pay Bob, but takes too long to sign. The specified blockhash
|
||||
expires and the transaction fails
|
||||
|
||||
```bash
|
||||
$ solana pay -k alice.json --blockhash expiredDTaxfagttWjQweib42b6ZHADSx94Tw8gHx3W7 bob.json 0.01
|
||||
$ solana transfer -k alice.json --blockhash expiredDTaxfagttWjQweib42b6ZHADSx94Tw8gHx11 bob.json 0.01
|
||||
[2020-01-02T18:48:28.462911000Z ERROR solana_cli::cli] Io(Custom { kind: Other, error: "Transaction \"33gQQaoPc9jWePMvDAeyJpcnSPiGUAdtVg8zREWv4GiKjkcGNufgpcbFyRKRrA25NkgjZySEeKue5rawyeH5TzsV\" failed: None" })
|
||||
Error: Io(Custom { kind: Other, error: "Transaction \"33gQQaoPc9jWePMvDAeyJpcnSPiGUAdtVg8zREWv4GiKjkcGNufgpcbFyRKRrA25NkgjZySEeKue5rawyeH5TzsV\" failed: None" })
|
||||
```
|
||||
@@ -225,7 +225,7 @@ nonce: F7vmkY3DTaxfagttWjQweib42b6ZHADSx94Tw8gHx3W7
|
||||
```
|
||||
|
||||
```bash
|
||||
$ solana pay -k alice.json --blockhash F7vmkY3DTaxfagttWjQweib42b6ZHADSx94Tw8gHx3W7 --nonce nonce.json bob.json 0.01
|
||||
$ solana transfer -k alice.json --blockhash F7vmkY3DTaxfagttWjQweib42b6ZHADSx94Tw8gHx3W7 --nonce nonce.json bob.json 0.01
|
||||
HR1368UKHVZyenmH7yVz5sBAijV6XAPeWbEiXEGVYQorRMcoijeNAbzZqEZiH8cDB8tk65ckqeegFjK8dHwNFgQ
|
||||
```
|
||||
|
||||
|
@@ -2,7 +2,7 @@
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
edition = "2021"
|
||||
name = "solana-dos"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
license = "Apache-2.0"
|
||||
homepage = "https://solana.com/"
|
||||
@@ -13,18 +13,18 @@ bincode = "1.3.3"
|
||||
clap = "2.33.1"
|
||||
log = "0.4.14"
|
||||
rand = "0.7.0"
|
||||
solana-core = { path = "../core", version = "=1.10.0" }
|
||||
solana-gossip = { path = "../gossip", version = "=1.10.0" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-net-utils = { path = "../net-utils", version = "=1.10.0" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.0" }
|
||||
solana-version = { path = "../version", version = "=1.10.0" }
|
||||
solana-client = { path = "../client", version = "=1.10.0" }
|
||||
solana-client = { path = "../client", version = "=1.10.2" }
|
||||
solana-core = { path = "../core", version = "=1.10.2" }
|
||||
solana-gossip = { path = "../gossip", version = "=1.10.2" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
solana-net-utils = { path = "../net-utils", version = "=1.10.2" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
solana-streamer = { path = "../streamer", version = "=1.10.2" }
|
||||
solana-version = { path = "../version", version = "=1.10.2" }
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
||||
[dev-dependencies]
|
||||
solana-local-cluster = { path = "../local-cluster", version = "=1.10.0" }
|
||||
solana-local-cluster = { path = "../local-cluster", version = "=1.10.2" }
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-download-utils"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "Solana Download Utils"
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
@@ -14,8 +14,8 @@ console = "0.15.0"
|
||||
indicatif = "0.16.2"
|
||||
log = "0.4.14"
|
||||
reqwest = { version = "0.11.9", default-features = false, features = ["blocking", "rustls-tls", "json"] }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
solana-runtime = { path = "../runtime", version = "=1.10.0" }
|
||||
solana-runtime = { path = "../runtime", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
|
||||
[lib]
|
||||
crate-type = ["lib"]
|
||||
|
@@ -276,14 +276,14 @@ pub fn download_snapshot_archive<'a, 'b>(
|
||||
] {
|
||||
let destination_path = match snapshot_type {
|
||||
SnapshotType::FullSnapshot => snapshot_utils::build_full_snapshot_archive_path(
|
||||
snapshot_archives_dir.to_path_buf(),
|
||||
snapshot_archives_dir,
|
||||
desired_snapshot_hash.0,
|
||||
&desired_snapshot_hash.1,
|
||||
archive_format,
|
||||
),
|
||||
SnapshotType::IncrementalSnapshot(base_slot) => {
|
||||
snapshot_utils::build_incremental_snapshot_archive_path(
|
||||
snapshot_archives_dir.to_path_buf(),
|
||||
snapshot_archives_dir,
|
||||
base_slot,
|
||||
desired_snapshot_hash.0,
|
||||
&desired_snapshot_hash.1,
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "solana-entry"
|
||||
version = "1.10.0"
|
||||
version = "1.10.2"
|
||||
description = "Solana Entry"
|
||||
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
|
||||
repository = "https://github.com/solana-labs/solana"
|
||||
@@ -10,6 +10,7 @@ documentation = "https://docs.rs/solana-poh"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
bincode = "1.3.3"
|
||||
crossbeam-channel = "0.5"
|
||||
dlopen = "0.1.8"
|
||||
dlopen_derive = "0.1.4"
|
||||
@@ -17,17 +18,16 @@ log = "0.4.11"
|
||||
rand = "0.7.0"
|
||||
rayon = "1.5.1"
|
||||
serde = "1.0.136"
|
||||
solana-measure = { path = "../measure", version = "=1.10.0" }
|
||||
solana-merkle-tree = { path = "../merkle-tree", version = "=1.10.0" }
|
||||
solana-metrics = { path = "../metrics", version = "=1.10.0" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.0" }
|
||||
solana-rayon-threadlimit = { path = "../rayon-threadlimit", version = "=1.10.0" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.0" }
|
||||
bincode = "1.3.3"
|
||||
solana-measure = { path = "../measure", version = "=1.10.2" }
|
||||
solana-merkle-tree = { path = "../merkle-tree", version = "=1.10.2" }
|
||||
solana-metrics = { path = "../metrics", version = "=1.10.2" }
|
||||
solana-perf = { path = "../perf", version = "=1.10.2" }
|
||||
solana-rayon-threadlimit = { path = "../rayon-threadlimit", version = "=1.10.2" }
|
||||
solana-sdk = { path = "../sdk", version = "=1.10.2" }
|
||||
|
||||
[dev-dependencies]
|
||||
matches = "0.1.9"
|
||||
solana-logger = { path = "../logger", version = "=1.10.0" }
|
||||
solana-logger = { path = "../logger", version = "=1.10.2" }
|
||||
|
||||
[lib]
|
||||
crate-type = ["lib"]
|
||||
|
2
explorer/.gitignore
vendored
2
explorer/.gitignore
vendored
@@ -24,3 +24,5 @@
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
#comment
|
||||
|
409
explorer/package-lock.json
generated
409
explorer/package-lock.json
generated
@@ -17,6 +17,7 @@
|
||||
"@project-serum/serum": "^0.13.61",
|
||||
"@react-hook/debounce": "^4.0.0",
|
||||
"@sentry/react": "^6.16.1",
|
||||
"@solana/buffer-layout": "^3.0.0",
|
||||
"@solana/spl-token-registry": "^0.2.1143",
|
||||
"@solana/web3.js": "^1.31.0",
|
||||
"@testing-library/jest-dom": "^5.16.1",
|
||||
@@ -10297,24 +10298,30 @@
|
||||
}
|
||||
},
|
||||
"node_modules/es-abstract": {
|
||||
"version": "1.18.0-next.2",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz",
|
||||
"integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==",
|
||||
"version": "1.19.1",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz",
|
||||
"integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.2",
|
||||
"es-to-primitive": "^1.2.1",
|
||||
"function-bind": "^1.1.1",
|
||||
"get-intrinsic": "^1.0.2",
|
||||
"get-intrinsic": "^1.1.1",
|
||||
"get-symbol-description": "^1.0.0",
|
||||
"has": "^1.0.3",
|
||||
"has-symbols": "^1.0.1",
|
||||
"is-callable": "^1.2.2",
|
||||
"has-symbols": "^1.0.2",
|
||||
"internal-slot": "^1.0.3",
|
||||
"is-callable": "^1.2.4",
|
||||
"is-negative-zero": "^2.0.1",
|
||||
"is-regex": "^1.1.1",
|
||||
"object-inspect": "^1.9.0",
|
||||
"is-regex": "^1.1.4",
|
||||
"is-shared-array-buffer": "^1.0.1",
|
||||
"is-string": "^1.0.7",
|
||||
"is-weakref": "^1.0.1",
|
||||
"object-inspect": "^1.11.0",
|
||||
"object-keys": "^1.1.1",
|
||||
"object.assign": "^4.1.2",
|
||||
"string.prototype.trimend": "^1.0.3",
|
||||
"string.prototype.trimstart": "^1.0.3"
|
||||
"string.prototype.trimend": "^1.0.4",
|
||||
"string.prototype.trimstart": "^1.0.4",
|
||||
"unbox-primitive": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
@@ -12526,6 +12533,21 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/get-symbol-description": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
|
||||
"integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.2",
|
||||
"get-intrinsic": "^1.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/get-value": {
|
||||
"version": "2.0.6",
|
||||
"resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz",
|
||||
@@ -12696,6 +12718,14 @@
|
||||
"node": ">= 0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/has-bigints": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz",
|
||||
"integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-cors": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz",
|
||||
@@ -12710,9 +12740,23 @@
|
||||
}
|
||||
},
|
||||
"node_modules/has-symbols": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
|
||||
"integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
|
||||
"integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/has-tostringtag": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz",
|
||||
"integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
@@ -13556,6 +13600,17 @@
|
||||
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
|
||||
"integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0="
|
||||
},
|
||||
"node_modules/is-bigint": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz",
|
||||
"integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==",
|
||||
"dependencies": {
|
||||
"has-bigints": "^1.0.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-binary-path": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
||||
@@ -13567,15 +13622,30 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-boolean-object": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz",
|
||||
"integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.2",
|
||||
"has-tostringtag": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-buffer": {
|
||||
"version": "1.1.6",
|
||||
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
|
||||
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
|
||||
},
|
||||
"node_modules/is-callable": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.2.tgz",
|
||||
"integrity": "sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA==",
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz",
|
||||
"integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
@@ -13763,6 +13833,20 @@
|
||||
"node": ">=0.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-number-object": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz",
|
||||
"integrity": "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==",
|
||||
"dependencies": {
|
||||
"has-tostringtag": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-obj": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
|
||||
@@ -13826,11 +13910,12 @@
|
||||
"integrity": "sha1-DFLlS8yjkbssSUsh6GJtczbG45c="
|
||||
},
|
||||
"node_modules/is-regex": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.1.tgz",
|
||||
"integrity": "sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg==",
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz",
|
||||
"integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==",
|
||||
"dependencies": {
|
||||
"has-symbols": "^1.0.1"
|
||||
"call-bind": "^1.0.2",
|
||||
"has-tostringtag": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
@@ -13860,6 +13945,14 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/is-shared-array-buffer": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz",
|
||||
"integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-stream": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz",
|
||||
@@ -13869,9 +13962,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/is-string": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz",
|
||||
"integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==",
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz",
|
||||
"integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==",
|
||||
"dependencies": {
|
||||
"has-tostringtag": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
@@ -13898,6 +13994,17 @@
|
||||
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
|
||||
"integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
|
||||
},
|
||||
"node_modules/is-weakref": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz",
|
||||
"integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.2"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/is-windows": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz",
|
||||
@@ -19001,9 +19108,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/object-inspect": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz",
|
||||
"integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==",
|
||||
"version": "1.12.0",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz",
|
||||
"integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
@@ -23864,11 +23971,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/string.prototype.trimend": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz",
|
||||
"integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==",
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz",
|
||||
"integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.0",
|
||||
"call-bind": "^1.0.2",
|
||||
"define-properties": "^1.1.3"
|
||||
},
|
||||
"funding": {
|
||||
@@ -23876,11 +23983,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/string.prototype.trimstart": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz",
|
||||
"integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==",
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz",
|
||||
"integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.0",
|
||||
"call-bind": "^1.0.2",
|
||||
"define-properties": "^1.1.3"
|
||||
},
|
||||
"funding": {
|
||||
@@ -24924,6 +25031,20 @@
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/unbox-primitive": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz",
|
||||
"integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==",
|
||||
"dependencies": {
|
||||
"function-bind": "^1.1.1",
|
||||
"has-bigints": "^1.0.1",
|
||||
"has-symbols": "^1.0.2",
|
||||
"which-boxed-primitive": "^1.0.2"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/unicode-canonical-property-names-ecmascript": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz",
|
||||
@@ -25259,30 +25380,6 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/util.promisify/node_modules/es-abstract": {
|
||||
"version": "1.17.7",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz",
|
||||
"integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==",
|
||||
"dependencies": {
|
||||
"es-to-primitive": "^1.2.1",
|
||||
"function-bind": "^1.1.1",
|
||||
"has": "^1.0.3",
|
||||
"has-symbols": "^1.0.1",
|
||||
"is-callable": "^1.2.2",
|
||||
"is-regex": "^1.1.1",
|
||||
"object-inspect": "^1.8.0",
|
||||
"object-keys": "^1.1.1",
|
||||
"object.assign": "^4.1.1",
|
||||
"string.prototype.trimend": "^1.0.1",
|
||||
"string.prototype.trimstart": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/util/node_modules/inherits": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
|
||||
@@ -26604,6 +26701,21 @@
|
||||
"which": "bin/which"
|
||||
}
|
||||
},
|
||||
"node_modules/which-boxed-primitive": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
|
||||
"integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
|
||||
"dependencies": {
|
||||
"is-bigint": "^1.0.1",
|
||||
"is-boolean-object": "^1.1.0",
|
||||
"is-number-object": "^1.0.4",
|
||||
"is-string": "^1.0.5",
|
||||
"is-symbol": "^1.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/which-module": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
|
||||
@@ -35124,24 +35236,30 @@
|
||||
}
|
||||
},
|
||||
"es-abstract": {
|
||||
"version": "1.18.0-next.2",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz",
|
||||
"integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==",
|
||||
"version": "1.19.1",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz",
|
||||
"integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==",
|
||||
"requires": {
|
||||
"call-bind": "^1.0.2",
|
||||
"es-to-primitive": "^1.2.1",
|
||||
"function-bind": "^1.1.1",
|
||||
"get-intrinsic": "^1.0.2",
|
||||
"get-intrinsic": "^1.1.1",
|
||||
"get-symbol-description": "^1.0.0",
|
||||
"has": "^1.0.3",
|
||||
"has-symbols": "^1.0.1",
|
||||
"is-callable": "^1.2.2",
|
||||
"has-symbols": "^1.0.2",
|
||||
"internal-slot": "^1.0.3",
|
||||
"is-callable": "^1.2.4",
|
||||
"is-negative-zero": "^2.0.1",
|
||||
"is-regex": "^1.1.1",
|
||||
"object-inspect": "^1.9.0",
|
||||
"is-regex": "^1.1.4",
|
||||
"is-shared-array-buffer": "^1.0.1",
|
||||
"is-string": "^1.0.7",
|
||||
"is-weakref": "^1.0.1",
|
||||
"object-inspect": "^1.11.0",
|
||||
"object-keys": "^1.1.1",
|
||||
"object.assign": "^4.1.2",
|
||||
"string.prototype.trimend": "^1.0.3",
|
||||
"string.prototype.trimstart": "^1.0.3"
|
||||
"string.prototype.trimend": "^1.0.4",
|
||||
"string.prototype.trimstart": "^1.0.4",
|
||||
"unbox-primitive": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"es-to-primitive": {
|
||||
@@ -36818,6 +36936,15 @@
|
||||
"pump": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"get-symbol-description": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
|
||||
"integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==",
|
||||
"requires": {
|
||||
"call-bind": "^1.0.2",
|
||||
"get-intrinsic": "^1.1.1"
|
||||
}
|
||||
},
|
||||
"get-value": {
|
||||
"version": "2.0.6",
|
||||
"resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz",
|
||||
@@ -36947,6 +37074,11 @@
|
||||
"function-bind": "^1.1.1"
|
||||
}
|
||||
},
|
||||
"has-bigints": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz",
|
||||
"integrity": "sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA=="
|
||||
},
|
||||
"has-cors": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz",
|
||||
@@ -36958,9 +37090,17 @@
|
||||
"integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0="
|
||||
},
|
||||
"has-symbols": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
|
||||
"integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg=="
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
|
||||
"integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A=="
|
||||
},
|
||||
"has-tostringtag": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz",
|
||||
"integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==",
|
||||
"requires": {
|
||||
"has-symbols": "^1.0.2"
|
||||
}
|
||||
},
|
||||
"has-value": {
|
||||
"version": "1.0.0",
|
||||
@@ -37637,6 +37777,14 @@
|
||||
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
|
||||
"integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0="
|
||||
},
|
||||
"is-bigint": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz",
|
||||
"integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==",
|
||||
"requires": {
|
||||
"has-bigints": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"is-binary-path": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
||||
@@ -37645,15 +37793,24 @@
|
||||
"binary-extensions": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"is-boolean-object": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz",
|
||||
"integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==",
|
||||
"requires": {
|
||||
"call-bind": "^1.0.2",
|
||||
"has-tostringtag": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"is-buffer": {
|
||||
"version": "1.1.6",
|
||||
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
|
||||
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
|
||||
},
|
||||
"is-callable": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.2.tgz",
|
||||
"integrity": "sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA=="
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz",
|
||||
"integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w=="
|
||||
},
|
||||
"is-capitalized": {
|
||||
"version": "1.0.0",
|
||||
@@ -37782,6 +37939,14 @@
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
||||
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="
|
||||
},
|
||||
"is-number-object": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.6.tgz",
|
||||
"integrity": "sha512-bEVOqiRcvo3zO1+G2lVMy+gkkEm9Yh7cDMRusKKu5ZJKPUYSJwICTKZrNKHA2EbSP0Tu0+6B/emsYNHZyn6K8g==",
|
||||
"requires": {
|
||||
"has-tostringtag": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"is-obj": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
|
||||
@@ -37827,11 +37992,12 @@
|
||||
"integrity": "sha1-DFLlS8yjkbssSUsh6GJtczbG45c="
|
||||
},
|
||||
"is-regex": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.1.tgz",
|
||||
"integrity": "sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg==",
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz",
|
||||
"integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==",
|
||||
"requires": {
|
||||
"has-symbols": "^1.0.1"
|
||||
"call-bind": "^1.0.2",
|
||||
"has-tostringtag": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"is-regexp": {
|
||||
@@ -37849,15 +38015,23 @@
|
||||
"resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz",
|
||||
"integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg=="
|
||||
},
|
||||
"is-shared-array-buffer": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz",
|
||||
"integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA=="
|
||||
},
|
||||
"is-stream": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz",
|
||||
"integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ="
|
||||
},
|
||||
"is-string": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz",
|
||||
"integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ=="
|
||||
"version": "1.0.7",
|
||||
"resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz",
|
||||
"integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==",
|
||||
"requires": {
|
||||
"has-tostringtag": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"is-symbol": {
|
||||
"version": "1.0.3",
|
||||
@@ -37872,6 +38046,14 @@
|
||||
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
|
||||
"integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
|
||||
},
|
||||
"is-weakref": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz",
|
||||
"integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==",
|
||||
"requires": {
|
||||
"call-bind": "^1.0.2"
|
||||
}
|
||||
},
|
||||
"is-windows": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz",
|
||||
@@ -41775,9 +41957,9 @@
|
||||
}
|
||||
},
|
||||
"object-inspect": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz",
|
||||
"integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw=="
|
||||
"version": "1.12.0",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz",
|
||||
"integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g=="
|
||||
},
|
||||
"object-is": {
|
||||
"version": "1.1.5",
|
||||
@@ -45657,20 +45839,20 @@
|
||||
}
|
||||
},
|
||||
"string.prototype.trimend": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz",
|
||||
"integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==",
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz",
|
||||
"integrity": "sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==",
|
||||
"requires": {
|
||||
"call-bind": "^1.0.0",
|
||||
"call-bind": "^1.0.2",
|
||||
"define-properties": "^1.1.3"
|
||||
}
|
||||
},
|
||||
"string.prototype.trimstart": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz",
|
||||
"integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==",
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz",
|
||||
"integrity": "sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==",
|
||||
"requires": {
|
||||
"call-bind": "^1.0.0",
|
||||
"call-bind": "^1.0.2",
|
||||
"define-properties": "^1.1.3"
|
||||
}
|
||||
},
|
||||
@@ -46462,6 +46644,17 @@
|
||||
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.28.tgz",
|
||||
"integrity": "sha512-6Gurc1n//gjp9eQNXjD9O3M/sMwVtN5S8Lv9bvOYBfKfDNiIIhqiyi01vMBO45u4zkDE420w/e0se7Vs+sIg+g=="
|
||||
},
|
||||
"unbox-primitive": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz",
|
||||
"integrity": "sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==",
|
||||
"requires": {
|
||||
"function-bind": "^1.1.1",
|
||||
"has-bigints": "^1.0.1",
|
||||
"has-symbols": "^1.0.2",
|
||||
"which-boxed-primitive": "^1.0.2"
|
||||
}
|
||||
},
|
||||
"unicode-canonical-property-names-ecmascript": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz",
|
||||
@@ -46717,26 +46910,6 @@
|
||||
"es-abstract": "^1.17.2",
|
||||
"has-symbols": "^1.0.1",
|
||||
"object.getownpropertydescriptors": "^2.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"es-abstract": {
|
||||
"version": "1.17.7",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz",
|
||||
"integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==",
|
||||
"requires": {
|
||||
"es-to-primitive": "^1.2.1",
|
||||
"function-bind": "^1.1.1",
|
||||
"has": "^1.0.3",
|
||||
"has-symbols": "^1.0.1",
|
||||
"is-callable": "^1.2.2",
|
||||
"is-regex": "^1.1.1",
|
||||
"object-inspect": "^1.8.0",
|
||||
"object-keys": "^1.1.1",
|
||||
"object.assign": "^4.1.1",
|
||||
"string.prototype.trimend": "^1.0.1",
|
||||
"string.prototype.trimstart": "^1.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"utila": {
|
||||
@@ -47807,6 +47980,18 @@
|
||||
"isexe": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"which-boxed-primitive": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
|
||||
"integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
|
||||
"requires": {
|
||||
"is-bigint": "^1.0.1",
|
||||
"is-boolean-object": "^1.1.0",
|
||||
"is-number-object": "^1.0.4",
|
||||
"is-string": "^1.0.5",
|
||||
"is-symbol": "^1.0.3"
|
||||
}
|
||||
},
|
||||
"which-module": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
|
||||
|
@@ -12,6 +12,7 @@
"@project-serum/serum": "^0.13.61",
"@react-hook/debounce": "^4.0.0",
"@sentry/react": "^6.16.1",
"@solana/buffer-layout": "^3.0.0",
"@solana/spl-token-registry": "^0.2.1143",
"@solana/web3.js": "^1.31.0",
"@testing-library/jest-dom": "^5.16.1",
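This hunk adds a single dependency, "@sentry/react": "^6.16.1". Further down in this diff, PythDetailsCard reports decoding failures through reportError from utils/sentry; that helper is not included in this compare view, so the following is only a minimal sketch, assuming it simply forwards the error and some extra context to Sentry:

import * as Sentry from "@sentry/react";

// Hypothetical helper; the explorer's real utils/sentry may differ.
export function reportError(error: unknown, context: Record<string, string>) {
  console.error(error);
  // captureContext.extra attaches the key/value pairs to the Sentry event.
  Sentry.captureException(error, { extra: context });
}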
@@ -5,11 +5,10 @@ import Select, { InputActionMeta, ActionMeta, ValueType } from "react-select";
import StateManager from "react-select";
import {
  LOADER_IDS,
  PROGRAM_NAME_BY_ID,
  PROGRAM_INFO_BY_ID,
  SPECIAL_IDS,
  SYSVAR_IDS,
  LoaderName,
  programLabel,
} from "utils/tx";
import { Cluster, useCluster } from "providers/cluster";
import { useTokenRegistry } from "providers/mints/token-registry";
@@ -73,10 +72,9 @@ export function SearchBar() {
}

function buildProgramOptions(search: string, cluster: Cluster) {
  const matchedPrograms = Object.entries(PROGRAM_NAME_BY_ID).filter(
    ([address]) => {
      const name = programLabel(address, cluster);
      if (!name) return false;
  const matchedPrograms = Object.entries(PROGRAM_INFO_BY_ID).filter(
    ([address, { name, deployments }]) => {
      if (!deployments.includes(cluster)) return false;
      return (
        name.toLowerCase().includes(search.toLowerCase()) ||
        address.includes(search)
@@ -87,10 +85,10 @@ function buildProgramOptions(search: string, cluster: Cluster) {
  if (matchedPrograms.length > 0) {
    return {
      label: "Programs",
      options: matchedPrograms.map(([id, name]) => ({
      options: matchedPrograms.map(([address, { name }]) => ({
        label: name,
        value: [name, id],
        pathname: "/address/" + id,
        value: [name, address],
        pathname: "/address/" + address,
      })),
    };
  }
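The hunk above replaces the flat PROGRAM_NAME_BY_ID lookup with PROGRAM_INFO_BY_ID so that search results can be restricted to programs deployed on the selected cluster. utils/tx itself is not part of this compare view; a minimal sketch of the shape the new filter assumes, with a hypothetical sample entry, looks like this:

import { Cluster } from "providers/cluster";

// Hypothetical shape and sample entry; the real map lives in utils/tx.
type ProgramInfo = { name: string; deployments: Cluster[] };

const PROGRAM_INFO_BY_ID: { [address: string]: ProgramInfo } = {
  "11111111111111111111111111111111": {
    name: "System Program",
    deployments: [Cluster.MainnetBeta, Cluster.Testnet, Cluster.Devnet],
  },
};

// Mirrors the filter in buildProgramOptions: only programs deployed to the
// current cluster are searchable, matched by name or by address substring.
function matchPrograms(search: string, cluster: Cluster) {
  return Object.entries(PROGRAM_INFO_BY_ID).filter(
    ([address, { name, deployments }]) =>
      deployments.includes(cluster) &&
      (name.toLowerCase().includes(search.toLowerCase()) ||
        address.includes(search))
  );
}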
239
explorer/src/components/SolanaPingCard.tsx
Normal file
@@ -0,0 +1,239 @@
import React from "react";
import classNames from "classnames";
import {
  PingRollupInfo,
  PingStatus,
  useSolanaPingInfo,
} from "providers/stats/SolanaPingProvider";
import { Bar } from "react-chartjs-2";
import { ChartOptions, ChartTooltipModel } from "chart.js";
import { Cluster, useCluster } from "providers/cluster";

export function SolanaPingCard() {
  const { cluster } = useCluster();

  if (cluster === Cluster.Custom) {
    return null;
  }

  return (
    <div className="card">
      <div className="card-header">
        <h4 className="card-header-title">Solana Ping Stats</h4>
      </div>
      <PingBarBody />
    </div>
  );
}

function PingBarBody() {
  const pingInfo = useSolanaPingInfo();

  if (pingInfo.status !== PingStatus.Ready) {
    return (
      <StatsNotReady
        error={pingInfo.status === PingStatus.Error}
        retry={pingInfo.retry}
      />
    );
  }

  return <PingBarChart pingInfo={pingInfo} />;
}

type StatsNotReadyProps = { error: boolean; retry?: Function };
function StatsNotReady({ error, retry }: StatsNotReadyProps) {
  if (error) {
    return (
      <div className="card-body text-center">
        There was a problem loading solana ping stats.{" "}
        {retry && (
          <button
            className="btn btn-white btn-sm"
            onClick={() => {
              retry();
            }}
          >
            <span className="fe fe-refresh-cw me-2"></span>
            Try Again
          </button>
        )}
      </div>
    );
  }

  return (
    <div className="card-body text-center">
      <span className="spinner-grow spinner-grow-sm me-2"></span>
      Loading
    </div>
  );
}

type Series = "short" | "medium" | "long";
const SERIES: Series[] = ["short", "medium", "long"];
const SERIES_INFO = {
  short: {
    label: (index: number) => index,
    interval: "30m",
  },
  medium: {
    label: (index: number) => index * 4,
    interval: "2h",
  },
  long: {
    label: (index: number) => index * 12,
    interval: "6h",
  },
};

const CUSTOM_TOOLTIP = function (this: any, tooltipModel: ChartTooltipModel) {
  // Tooltip Element
  let tooltipEl = document.getElementById("chartjs-tooltip");

  // Create element on first render
  if (!tooltipEl) {
    tooltipEl = document.createElement("div");
    tooltipEl.id = "chartjs-tooltip";
    tooltipEl.innerHTML = `<div class="content"></div>`;
    document.body.appendChild(tooltipEl);
  }

  // Hide if no tooltip
  if (tooltipModel.opacity === 0) {
    tooltipEl.style.opacity = "0";
    return;
  }

  // Set Text
  if (tooltipModel.body) {
    const { label, value } = tooltipModel.dataPoints[0];
    const tooltipContent = tooltipEl.querySelector("div");
    if (tooltipContent) {
      let innerHtml = `<div class="value">${value} ms</div>`;
      innerHtml += `<div class="label">${label}</div>`;
      tooltipContent.innerHTML = innerHtml;
    }
  }

  // Enable tooltip and set position
  const canvas: Element = this._chart.canvas;
  const position = canvas.getBoundingClientRect();
  tooltipEl.style.opacity = "1";
  tooltipEl.style.left =
    position.left + window.pageXOffset + tooltipModel.caretX + "px";
  tooltipEl.style.top =
    position.top + window.pageYOffset + tooltipModel.caretY + "px";
};

const CHART_OPTION: ChartOptions = {
  tooltips: {
    intersect: false, // Show tooltip when cursor in between bars
    enabled: false, // Hide default tooltip
    custom: CUSTOM_TOOLTIP,
  },
  legend: {
    display: false,
  },
  scales: {
    xAxes: [
      {
        ticks: {
          display: false,
        },
        gridLines: {
          display: false,
        },
      },
    ],
    yAxes: [
      {
        ticks: {
          stepSize: 100,
          fontSize: 10,
          fontColor: "#EEE",
          beginAtZero: true,
          display: true,
        },
        gridLines: {
          display: false,
        },
      },
    ],
  },
  animation: {
    duration: 0, // general animation time
  },
  hover: {
    animationDuration: 0, // duration of animations when hovering an item
  },
  responsiveAnimationDuration: 0, // animation duration after a resize
};

function PingBarChart({ pingInfo }: { pingInfo: PingRollupInfo }) {
  const [series, setSeries] = React.useState<Series>("short");
  const seriesData = pingInfo[series] || [];

  const seriesLength = seriesData.length;
  const chartData: Chart.ChartData = {
    labels: seriesData.map((val, i) => {
      return `
        <p class="mb-0">${val.confirmed} of ${val.submitted} confirmed</p>
        ${
          val.loss
            ? `<p class="mb-0">${val.loss.toLocaleString(undefined, {
                style: "percent",
                minimumFractionDigits: 2,
              })} loss</p>`
            : ""
        }
        ${SERIES_INFO[series].label(seriesLength - i)}min ago
      `;
    }),
    datasets: [
      {
        backgroundColor: seriesData.map((val) =>
          val.loss > 0.5 ? "#f00" : "#00D192"
        ),
        hoverBackgroundColor: seriesData.map((val) =>
          val.loss > 0.5 ? "#f00" : "#00D192"
        ),
        borderWidth: 0,
        data: seriesData.map((val) => val.mean || 0),
      },
    ],
  };

  return (
    <div className="card-body py-3">
      <div className="align-box-row align-items-start justify-content-between">
        <div className="d-flex justify-content-between w-100">
          <span className="mb-0 font-size-sm">Average Ping Time</span>

          <div className="font-size-sm">
            {SERIES.map((key) => (
              <button
                key={key}
                onClick={() => setSeries(key)}
                className={classNames("btn btn-sm btn-white ms-2", {
                  active: series === key,
                })}
              >
                {SERIES_INFO[key].interval}
              </button>
            ))}
          </div>
        </div>

        <div
          id="perf-history"
          className="mt-3 d-flex justify-content-end flex-row w-100"
        >
          <div className="w-100">
            <Bar data={chartData} options={CHART_OPTION} height={80} />
          </div>
        </div>
      </div>
    </div>
  );
}
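The new card reads everything from providers/stats/SolanaPingProvider, which is not shown in this diff. Inferring only from how SolanaPingCard uses it, the provider's types are assumed to look roughly like the sketch below; the real definitions may differ:

// Shapes inferred from SolanaPingCard's usage of useSolanaPingInfo().
export enum PingStatus {
  Loading,
  Ready,
  Error,
}

export type PingMetric = {
  submitted: number;
  confirmed: number;
  loss: number; // fraction of submitted pings that never confirmed
  mean: number; // mean confirmation time in milliseconds
};

export type PingRollupInfo = {
  status: PingStatus.Ready;
  short?: PingMetric[]; // 30m window
  medium?: PingMetric[]; // 2h window
  long?: PingMetric[]; // 6h window
};

export type PingInfo =
  | { status: PingStatus.Loading | PingStatus.Error; retry?: () => void }
  | PingRollupInfo;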
@@ -1,11 +1,17 @@
import React from "react";
import "bootstrap/dist/js/bootstrap.min.js";
import { NFTData } from "providers/accounts";
import {
  NFTData,
  useFetchAccountInfo,
  useMintAccountInfo,
} from "providers/accounts";
import { programs } from "@metaplex/js";
import { ArtContent } from "components/common/NFTArt";
import { InfoTooltip } from "components/common/InfoTooltip";
import { clusterPath } from "utils/url";
import { Link } from "react-router-dom";
import { EditionInfo } from "providers/accounts/utils/getEditionInfo";
import { PublicKey } from "@solana/web3.js";

export function NFTHeader({
  nftData,
@@ -14,8 +20,22 @@ export function NFTHeader({
  nftData: NFTData;
  address: string;
}) {
  const collectionAddress = nftData.metadata.collection?.key;
  const collectionMintInfo = useMintAccountInfo(collectionAddress);
  const fetchAccountInfo = useFetchAccountInfo();

  React.useEffect(() => {
    if (collectionAddress && !collectionMintInfo) {
      fetchAccountInfo(new PublicKey(collectionAddress));
    }
  }, [fetchAccountInfo, collectionAddress]); // eslint-disable-line react-hooks/exhaustive-deps

  const metadata = nftData.metadata;
  const data = nftData.json;
  const isVerifiedCollection =
    metadata.collection != null &&
    metadata.collection?.verified &&
    collectionMintInfo !== undefined;
  return (
    <div className="row">
      <div className="col-auto ms-2 d-flex align-items-center">
@@ -30,6 +50,7 @@ export function NFTHeader({
            : "No NFT name was found"}
        </h2>
        {getEditionPill(nftData.editionInfo)}
        {isVerifiedCollection ? getVerifiedCollectionPill() : null}
      </div>
      <h4 className="header-pretitle ms-1 mt-1 no-overflow-with-ellipsis">
        {metadata.data.symbol !== ""
@@ -174,3 +195,14 @@ function getIsMutablePill(isMutable: boolean) {
    }`}</span>
  );
}

function getVerifiedCollectionPill() {
  const onchainVerifiedToolTip =
    "This NFT has been verified as a member of an on-chain collection. This tag guarantees authenticity.";
  return (
    <div className={"d-inline-flex align-items-center ms-2"}>
      <span className="badge badge-pill bg-dark">{"Verified Collection"}</span>
      <InfoTooltip bottom text={onchainVerifiedToolTip} />
    </div>
  );
}
@@ -343,6 +343,18 @@ function NonFungibleTokenMintAccountCard({
          </td>
        </tr>
      )}
      {nftData?.metadata.collection?.verified && (
        <tr>
          <td>Verified Collection Address</td>
          <td className="text-lg-end">
            <Address
              pubkey={new PublicKey(nftData.metadata.collection.key)}
              alignRight
              link
            />
          </td>
        </tr>
      )}
      {mintInfo.mintAuthority && (
        <tr>
          <td>Mint Authority</td>
@@ -0,0 +1,60 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { AddMappingParams } from "./program";

export default function AddMappingDetailsCard({
  ix,
  index,
  result,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  info: AddMappingParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title="Pyth: Add Mapping Account"
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Funding Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.fundingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Mapping Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.mappingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Next Mapping Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.nextMappingPubkey} alignRight link />
        </td>
      </tr>
    </InstructionCard>
  );
}
@@ -0,0 +1,70 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { AddPriceParams, PriceType } from "./program";

export default function AddPriceDetailsCard({
  ix,
  index,
  result,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  info: AddPriceParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title="Pyth: Add Price Account"
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Funding Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.fundingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Product Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.productPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Price Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.pricePubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Exponent</td>
        <td className="text-lg-end">{info.exponent}</td>
      </tr>

      <tr>
        <td>Price Type</td>
        <td className="text-lg-end">{PriceType[info.priceType]}</td>
      </tr>
    </InstructionCard>
  );
}
@@ -0,0 +1,60 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { AddProductParams } from "./program";

export default function AddProductDetailsCard({
  ix,
  index,
  result,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  info: AddProductParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title="Pyth: Add Product"
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Funding Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.fundingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Mapping Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.mappingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Product Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.productPubkey} alignRight link />
        </td>
      </tr>
    </InstructionCard>
  );
}
@@ -0,0 +1,53 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { AggregatePriceParams } from "./program";

export default function AggregatePriceDetailsCard({
  ix,
  index,
  result,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  info: AggregatePriceParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title="Pyth: Update Price"
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Funding Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.fundingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Price Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.pricePubkey} alignRight link />
        </td>
      </tr>
    </InstructionCard>
  );
}
@@ -0,0 +1,55 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { BasePublisherOperationParams } from "./program";

export default function BasePublisherOperationCard({
  ix,
  index,
  result,
  operationName,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  operationName: string;
  info: BasePublisherOperationParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title={`Pyth: ${operationName}`}
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Price Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.pricePubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Publisher</td>
        <td className="text-lg-end">
          <Address pubkey={info.publisherPubkey} alignRight link />
        </td>
      </tr>
    </InstructionCard>
  );
}
@@ -0,0 +1,53 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { InitMappingParams } from "./program";

export default function InitMappingDetailsCard({
  ix,
  index,
  result,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  info: InitMappingParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title="Pyth: Init Mapping Account"
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Funding Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.fundingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Mapping Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.mappingPubkey} alignRight link />
        </td>
      </tr>
    </InstructionCard>
  );
}
@@ -0,0 +1,63 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { Address } from "components/common/Address";
import { InstructionCard } from "../InstructionCard";
import { InitPriceParams, PriceType } from "./program";

export default function InitPriceDetailsCard({
  ix,
  index,
  result,
  info,
  innerCards,
  childIndex,
}: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  info: InitPriceParams;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title="Pyth: Init Price Account"
      innerCards={innerCards}
      childIndex={childIndex}
    >
      <tr>
        <td>Program</td>
        <td className="text-lg-end">
          <Address pubkey={ix.programId} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Funding Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.fundingPubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Price Account</td>
        <td className="text-lg-end">
          <Address pubkey={info.pricePubkey} alignRight link />
        </td>
      </tr>

      <tr>
        <td>Exponent</td>
        <td className="text-lg-end">{info.exponent}</td>
      </tr>

      <tr>
        <td>Price Type</td>
        <td className="text-lg-end">{PriceType[info.priceType]}</td>
      </tr>
    </InstructionCard>
  );
}
123
explorer/src/components/instruction/pyth/PythDetailsCard.tsx
Normal file
@@ -0,0 +1,123 @@
import React from "react";
import { SignatureResult, TransactionInstruction } from "@solana/web3.js";
import { useCluster } from "providers/cluster";
import { reportError } from "utils/sentry";
import { InstructionCard } from "../InstructionCard";
import { PythInstruction } from "./program";
import UpdatePriceDetailsCard from "./UpdatePriceDetailsCard";
import BasePublisherOperationCard from "./BasePublisherOperationCard";
import AddProductDetailsCard from "./AddProductDetailsCard";
import AddPriceDetailsCard from "./AddPriceDetailsCard";
import UpdateProductDetailsCard from "./UpdateProductDetailsCard";
import InitMappingDetailsCard from "./InitMappingDetailsCard";
import AddMappingDetailsCard from "./AddMappingDetailsCard";
import AggregatePriceDetailsCard from "./AggregatePriceDetailsCard";
import InitPriceDetailsCard from "./InitPriceDetailsCard";

export function PythDetailsCard(props: {
  ix: TransactionInstruction;
  index: number;
  result: SignatureResult;
  signature: string;
  innerCards?: JSX.Element[];
  childIndex?: number;
}) {
  const { url } = useCluster();
  const { ix, index, result, signature, innerCards, childIndex } = props;

  try {
    let ixType = PythInstruction.decodeInstructionType(ix);

    switch (ixType) {
      case "InitMapping":
        return (
          <InitMappingDetailsCard
            info={PythInstruction.decodeInitMapping(ix)}
            {...props}
          />
        );
      case "AddMapping":
        return (
          <AddMappingDetailsCard
            info={PythInstruction.decodeAddMapping(ix)}
            {...props}
          />
        );
      case "AddProduct":
        return (
          <AddProductDetailsCard
            info={PythInstruction.decodeAddProduct(ix)}
            {...props}
          />
        );
      case "UpdateProduct":
        return (
          <UpdateProductDetailsCard
            info={PythInstruction.decodeUpdateProduct(ix)}
            {...props}
          />
        );
      case "AddPrice":
        return (
          <AddPriceDetailsCard
            info={PythInstruction.decodeAddPrice(ix)}
            {...props}
          />
        );
      case "AddPublisher":
        return (
          <BasePublisherOperationCard
            operationName="Add Publisher"
            info={PythInstruction.decodeAddPublisher(ix)}
            {...props}
          />
        );
      case "DeletePublisher":
        return (
          <BasePublisherOperationCard
            operationName="Delete Publisher"
            info={PythInstruction.decodeDeletePublisher(ix)}
            {...props}
          />
        );
      case "UpdatePrice":
        return (
          <UpdatePriceDetailsCard
            info={PythInstruction.decodeUpdatePrice(ix)}
            {...props}
          />
        );
      case "AggregatePrice":
        return (
          <AggregatePriceDetailsCard
            info={PythInstruction.decodeAggregatePrice(ix)}
            {...props}
          />
        );
      case "InitPrice":
        return (
          <InitPriceDetailsCard
            info={PythInstruction.decodeInitPrice(ix)}
            {...props}
          />
        );
    }
  } catch (error) {
    reportError(error, {
      url: url,
      signature: signature,
    });
  }

  return (
    <InstructionCard
      ix={ix}
      index={index}
      result={result}
      title={`Pyth: Unknown`}
      innerCards={innerCards}
      childIndex={childIndex}
      defaultRaw
    />
  );
}
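PythDetailsCard dispatches on the ./program module, which this compare view does not include. Judging only from the names consumed by the cards above, its surface is assumed to look roughly like the sketch below; the PriceType members and the exact field layouts are placeholders:

import { PublicKey, TransactionInstruction } from "@solana/web3.js";

// Placeholder enum; the real variants are defined by the Pyth oracle program.
export enum PriceType {
  Unknown,
  Price,
}

// Variant names taken verbatim from the switch in PythDetailsCard.
export type PythInstructionType =
  | "InitMapping"
  | "AddMapping"
  | "AddProduct"
  | "UpdateProduct"
  | "AddPrice"
  | "AddPublisher"
  | "DeletePublisher"
  | "UpdatePrice"
  | "AggregatePrice"
  | "InitPrice";

// One of the decoded shapes consumed above (by AddPriceDetailsCard).
export type AddPriceParams = {
  fundingPubkey: PublicKey;
  productPubkey: PublicKey;
  pricePubkey: PublicKey;
  exponent: number;
  priceType: PriceType;
};

// Assumed decoder surface: classify the instruction, then decode per variant.
export interface PythInstructionDecoder {
  decodeInstructionType(ix: TransactionInstruction): PythInstructionType;
  decodeAddPrice(ix: TransactionInstruction): AddPriceParams;
  // ...one decode helper per instruction variant, as used in the switch above.
}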
Some files were not shown because too many files have changed in this diff.