Compare commits

10 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c9cbc39ec9 | |
| | 606a392d50 | |
| | c67596ceb4 | |
| | 9a42cc7555 | |
| | 2e5ef2a802 | |
| | 8c8e2c4b2b | |
| | 0578801f99 | |
| | 6141e1410a | |
| | 4fc86807ff | |
| | d2a2eba69e | |
.buildkite/env/secrets.ejson (9 changes, vendored)
@@ -1,7 +1,12 @@
{
    "_public_key": "ae29f4f7ad2fc92de70d470e411c8426d5d48db8817c9e3dae574b122192335f",
    "_comment": "These credentials are encrypted and pose no risk",
    "environment": {
      "CODECOV_TOKEN": "EJ[1:Z7OneT3RdJJ0DipCHQ7rC84snQ+FPbgHwZADQiz54wk=:3K68mE38LJ2RB98VWmjuNLFBNn1XTGR4:cR4r05/TOZQKmEZp1v4CSgUJtC6QJiOaL85QjXW0qZ061fMnsBA8AtAPMDoDq4WCGOZM1A==]"
      "CODECOV_TOKEN": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:JnxhrIxh09AvqdJgrVSYmb7PxSrh19aE:07WzVExCHEd1lJ1m8QizRRthGri+WBNeZRKjjEvsy5eo4gv3HD7zVEm42tVTGkqITKkBNQ==]",
      "CRATES_IO_TOKEN": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:d0jJqC32/axwzq/N7kMRmpxKhnRrhtpt:zvcPHwkOzGnjhNkAQSejwdy1Jkr9wR1qXFFCnfIjyt/XQYubzB1tLkoly/qdmeb5]",
      "GEOLOCATION_API_KEY": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:R4gfB6Ey4i50HyfLt4UZDLBqg3qHEUye:UfZCOgt8XI6Y2g+ivCRVoS1fjFycFs7/GSevvCqh1B50mG0+hzpEyzXQLuKG5OeI]",
      "GITHUB_TOKEN": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:Vq2dkGTOzfEpRht0BAGHFp/hDogMvXJe:tFXHg1epVt2mq9hkuc5sRHe+KAnVREi/p8S+IZu67XRyzdiA/nGak1k860FXYuuzuaE0QWekaEc=]",
      "INFLUX_DATABASE": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:5KI9WBkXx3R/W4m256mU5MJOE7N8aAT9:Cb8QFELZ9I60t5zhJ9h55Kcs]",
      "INFLUX_PASSWORD": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:hQRMpLCrav+OYkNphkeM4hagdVoZv5Iw:AUO76rr6+gF1OLJA8ZLSG8wHKXgYCPNk6gRCV8rBhZBJ4KwDaxpvOhMl7bxxXG6jol7v4aRa/Lk=]",
      "INFLUX_USERNAME": "EJ[1:yGpTmjdbyjW2kjgIHkFoJv7Ue7EbUvUbqHyw6anGgWg=:R7BNmQjfeqoGDAFTJu9bYTGHol2NgnYN:Q2tOT/EBcFvhFk+DKLKmVU7tLCpVC3Ui]"
    }
}

@@ -3,19 +3,16 @@
#
# Save target/ for the next CI build on this machine
#
if [[ -z $CARGO_TARGET_CACHE ]]; then
  echo "+++ CARGO_TARGET_CACHE not defined" # pre-command should have defined it
else
(
  set -x
    mkdir -p "$CARGO_TARGET_CACHE"
  d=$HOME/cargo-target-cache/"$BUILDKITE_LABEL"
  mkdir -p "$d"
  set -x
    rsync -a --delete --link-dest="$PWD" target "$CARGO_TARGET_CACHE"
    du -hs "$CARGO_TARGET_CACHE"
    read -r cacheSizeInGB _ < <(du -s --block-size=1800000000 "$CARGO_TARGET_CACHE")
    echo "--- ${cacheSizeInGB}GB: $CARGO_TARGET_CACHE"
  rsync -a --delete --link-dest="$PWD" target "$d"
  du -hs "$d"
  read -r cacheSizeInGB _ < <(du -s --block-size=1800000000 "$d")
  echo "--- ${cacheSizeInGB}GB: $d"
)
fi

#
# Add job_stats data point

@@ -11,29 +11,23 @@ export PS4="++"
#
# Restore target/ from the previous CI build on this machine
#
eval "$(ci/channel-info.sh)"
export CARGO_TARGET_CACHE=$HOME/cargo-target-cache/"$CHANNEL"-"$BUILDKITE_LABEL"
(
  set -x
  d=$HOME/cargo-target-cache/"$BUILDKITE_LABEL"
  MAX_CACHE_SIZE=18 # gigabytes

  if [[ -d $CARGO_TARGET_CACHE ]]; then
    du -hs "$CARGO_TARGET_CACHE"
    read -r cacheSizeInGB _ < <(du -s --block-size=1800000000 "$CARGO_TARGET_CACHE")
    echo "--- ${cacheSizeInGB}GB: $CARGO_TARGET_CACHE"
  if [[ -d $d ]]; then
    du -hs "$d"
    read -r cacheSizeInGB _ < <(du -s --block-size=1800000000 "$d")
    echo "--- ${cacheSizeInGB}GB: $d"
    if [[ $cacheSizeInGB -gt $MAX_CACHE_SIZE ]]; then
      echo "--- $CARGO_TARGET_CACHE is too large, removing it"
      rm -rf "$CARGO_TARGET_CACHE"
      echo "--- $d is too large, removing it"
      rm -rf "$d"
    fi
  else
    echo "--- $CARGO_TARGET_CACHE not present"
    echo "--- $d not present"
  fi

  mkdir -p "$CARGO_TARGET_CACHE"/target
  rsync -a --delete --link-dest="$CARGO_TARGET_CACHE" "$CARGO_TARGET_CACHE"/target .

  # Don't reuse BPF target build artifacts due to incremental build issues with
  # `std:
  #    "found possibly newer version of crate `std` which `xyz` depends on
  rm -rf target/bpfel-unknown-unknown
  mkdir -p "$d"/target
  rsync -a --delete --link-dest="$d" "$d"/target .
)

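Both CI cache scripts above lean on rsync's `--link-dest` option: files in the transfer that are unchanged relative to the directory named by `--link-dest` are hard-linked rather than copied, so saving and restoring a multi-gigabyte `target/` directory costs little extra disk or time. A minimal standalone sketch of that idiom follows; the paths are illustrative placeholders, not the ones used in this diff (the real scripts derive them from `$HOME`, `$CHANNEL`, and `$BUILDKITE_LABEL`).

```bash
#!/usr/bin/env bash
set -euo pipefail

# Illustrative placeholder cache location (assumption, not from the diff).
cache=$HOME/example-target-cache
mkdir -p "$cache"

# Save: mirror target/ into the cache. --link-dest points at the source
# tree, so unchanged files become hard links instead of full copies.
rsync -a --delete --link-dest="$PWD" target "$cache"

# Restore: mirror the cached target/ back into the working tree,
# this time hard-linking against the cache directory.
rsync -a --delete --link-dest="$cache" "$cache"/target .

# Report how much disk the cache occupies, as the scripts above do
# before deciding whether it has grown too large.
du -hs "$cache"
```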
@@ -9,10 +9,23 @@

set -e
cd "$(dirname "$0")"/..
source ci/_

_ ci/buildkite-pipeline.sh pipeline.yml
echo +++ pipeline
cat pipeline.yml
if [[ -n $BUILDKITE_TAG ]]; then
  buildkite-agent annotate --style info --context release-tag \
    "https://github.com/solana-labs/solana/releases/$BUILDKITE_TAG"
  buildkite-agent pipeline upload ci/buildkite-release.yml
else
  if [[ $BUILDKITE_BRANCH =~ ^pull ]]; then
    # Add helpful link back to the corresponding Github Pull Request
    buildkite-agent annotate --style info --context pr-backlink \
      "Github Pull Request: https://github.com/solana-labs/solana/$BUILDKITE_BRANCH"
  fi

_ buildkite-agent pipeline upload pipeline.yml
  if [[ $BUILDKITE_MESSAGE =~ GitBook: ]]; then
    buildkite-agent annotate --style info --context gitbook-ci-skip \
      "GitBook commit detected, CI skipped"
    exit
  fi

  buildkite-agent pipeline upload ci/buildkite.yml
fi

Binary file not shown.

@@ -1,4 +0,0 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by fontconfig.
# For information about cache directory tags, see:
#       http://www.brynosaurus.com/cachedir/

Binary file not shown.

Binary file not shown.

Binary file not shown.

.gitbook.yaml (18 changes, new file)
@@ -0,0 +1,18 @@
root: ./docs/src

structure:
    readme: introduction.md
    summary: SUMMARY.md

redirects:
    wallet: ./wallet-guide/README.md
    wallet/app-wallets: ./wallet-guide/apps.md
    wallet/app-wallets/trust-wallet: ./wallet-guide/trust-wallet.md
    wallet/app-wallets/ledger-live:  ./wallet-guide/ledger-live.md
    wallet/cli-wallets:  ./wallet-guide/cli.md
    wallet/cli-wallets/paper-wallet:  ./paper-wallet/README.md
    wallet/cli-wallets/paper-wallet/paper-wallet-usage: ./paper-wallet/paper-wallet-usage.md
    wallet/cli-wallets/remote-wallet: ./hardware-wallets/README.md
    wallet/cli-wallets/remote-wallet/ledger: ./hardware-wallets/ledger.md
    wallet/cli-wallets/file-system-wallet: ./file-system-wallet/README.md
    wallet/support: ./wallet-guide/support.md

.github/dependabot.yml (41 changes, vendored)
@@ -1,41 +0,0 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
- package-ecosystem: cargo
  directory: "/"
  schedule:
    interval: daily
    time: "01:00"
    timezone: America/Los_Angeles
  #labels:
  #  - "automerge"
  open-pull-requests-limit: 3

- package-ecosystem: npm
  directory: "/web3.js"
  schedule:
    interval: daily
    time: "01:00"
    timezone: America/Los_Angeles
  labels:
    - "automerge"
  commit-message:
    prefix: "chore:"
  open-pull-requests-limit: 3

- package-ecosystem: npm
  directory: "/explorer"
  schedule:
    interval: daily
    time: "01:00"
    timezone: America/Los_Angeles
  labels:
    - "automerge"
  commit-message:
    prefix: "chore:"
    include: "scope"
  open-pull-requests-limit: 3

.github/stale.yml (33 changes, vendored)
@@ -1,17 +1,18 @@
# Issues with these labels will never be considered stale
exemptLabels:
  - security
  - blocked
only: pulls

# Label to use when marking a pull request as stale
staleLabel: stale

pulls:
# Number of days of inactivity before a pull request becomes stale
daysUntilStale: 7

# Number of days of inactivity before a stale pull request is closed
daysUntilClose: 7

# Issues with these labels will never be considered stale
exemptLabels:
  - security

# Label to use when marking a pull request as stale
staleLabel: stale

# Comment to post when marking a pull request as stale. Set to `false` to disable
markComment: >
  This pull request has been automatically marked as stale because it has not had
@@ -21,19 +22,3 @@ pulls:
closeComment: >
  This stale pull request has been automatically closed.
  Thank you for your contributions.

issues:
  # Number of days of inactivity before a issue becomes stale
  daysUntilStale: 365

  # Number of days of inactivity before a stale issue is closed
  daysUntilClose: 7
  # Comment to post when marking a issue as stale. Set to `false` to disable
  markComment: >
    This issue has been automatically marked as stale because it has not had
    recent activity. It will be closed if no further activity occurs.

  # Comment to post when closing a stale issue. Set to `false` to disable
  closeComment: >
    This stale issue has been automatically closed.
    Thank you for your contributions.

.gitignore (6 changes, vendored)
@@ -23,9 +23,3 @@ log-*/
/.idea/
/solana.iml
/.vscode/

# fetch-spl.sh artifacts
/spl-genesis-args.sh
/spl_*.so

.DS_Store

.mergify.yml (65 changes)
@@ -1,40 +1,9 @@
# Validate your changes with:
#
#   $ curl -F 'data=@.mergify.yml' https://gh.mergify.io/validate/
#   $ curl -F 'data=@.mergify.yml' https://gh.mergify.io/validate
#
# https://doc.mergify.io/
pull_request_rules:
  - name: automatic merge (squash) on CI success
    conditions:
      - status-success=buildkite/solana
      - status-success=Travis CI - Pull Request
      - status-success=ci-gate
      - label=automerge
      - author≠@dont-squash-my-commits
    actions:
      merge:
        method: squash
  # Join the dont-squash-my-commits group if you won't like your commits squashed
  - name: automatic merge (rebase) on CI success
    conditions:
      - status-success=buildkite/solana
      - status-success=Travis CI - Pull Request
      - status-success=ci-gate
      - label=automerge
      - author=@dont-squash-my-commits
    actions:
      merge:
        method: rebase
  - name: remove automerge label on CI failure
    conditions:
      - label=automerge
      - "#status-failure!=0"
    actions:
      label:
        remove:
          - automerge
      comment:
        message: automerge label removed due to a CI failure
  - name: remove outdated reviews
    conditions:
      - base=master
@@ -50,6 +19,22 @@ pull_request_rules:
      label:
        add:
          - automerge
  - name: v1.0 backport
    conditions:
      - label=v1.0
    actions:
      backport:
        ignore_conflicts: true
        branches:
          - v1.0
  - name: v1.1 backport
    conditions:
      - label=v1.1
    actions:
      backport:
        ignore_conflicts: true
        branches:
          - v1.1
  - name: v1.2 backport
    conditions:
      - label=v1.2
@@ -58,19 +43,3 @@ pull_request_rules:
        ignore_conflicts: true
        branches:
          - v1.2
  - name: v1.3 backport
    conditions:
      - label=v1.3
    actions:
      backport:
        ignore_conflicts: true
        branches:
          - v1.3
  - name: v1.4 backport
    conditions:
      - label=v1.4
    actions:
      backport:
        ignore_conflicts: true
        branches:
          - v1.4

.travis.yml (125 changes)
@@ -1,44 +1,28 @@
os:
  - osx
  - windows

language: rust
rust:
  - stable

install:
  - source ci/rust-version.sh

script:
  - source ci/env.sh
  - ci/publish-tarball.sh

branches:
  only:
    - master
    - /^v\d+\.\d+/

notifications:
  email: false
  slack:
    on_success: change
    if: NOT type = pull_request
    secure: F4IjOE05MyaMOdPRL+r8qhs7jBvv4yDM3RmFKE1zNXnfUOqV4X38oQM1EI+YVsgpMQLj/pxnEB7wcTE4Bf86N6moLssEULCpvAuMVoXj4QbWdomLX+01WbFa6fLVeNQIg45NHrz2XzVBhoKOrMNnl+QI5mbR2AlS5oqsudHsXDnyLzZtd4Y5SDMdYG1zVWM01+oNNjgNfjcCGmOE/K0CnOMl6GPi3X9C34tJ19P2XT7MTDsz1/IfEF7fro2Q8DHEYL9dchJMoisXSkem5z7IDQkGzXsWdWT4NnndUvmd1MlTCE9qgoXDqRf95Qh8sB1Dz08HtvgfaosP2XjtNTfDI9BBYS15Ibw9y7PchAJE1luteNjF35EOy6OgmCLw/YpnweqfuNViBZz+yOPWXVC0kxnPIXKZ1wyH9ibeH6E4hr7a8o9SV/6SiWIlbYF+IR9jPXyTCLP/cc3sYljPWxDnhWFwFdRVIi3PbVAhVu7uWtVUO17Oc9gtGPgs/GrhOMkJfwQPXaudRJDpVZowxTX4x9kefNotlMAMRgq+Drbmgt4eEBiCNp0ITWgh17BiE1U09WS3myuduhoct85+FoVeaUkp1sxzHVtGsNQH0hcz7WcpZyOM+AwistJA/qzeEDQao5zi1eKWPbO2xAhi2rV1bDH6bPf/4lDBwLRqSiwvlWU=

os: linux
dist: bionic
language: minimal

jobs:
  include:
    - name: "Export Github Repositories"
      if: type IN (push, cron) AND branch = master
      language: python
      git:
        depth: false
      script:
        - .travis/export-github-repo.sh web3.js/ solana-web3.js
        - .travis/export-github-repo.sh explorer/ explorer

    - &release-artifacts
      if: type IN (api, cron) OR tag IS present
      name: "macOS release artifacts"
      os: osx
      language: rust
      rust:
        - stable
      install:
        - source ci/rust-version.sh
        - PATH="/usr/local/opt/coreutils/libexec/gnubin:$PATH"
        - readlink -f .
      script:
        - source ci/env.sh
        - ci/publish-tarball.sh
deploy:
  - provider: s3
    access_key_id: $AWS_ACCESS_KEY_ID
@@ -51,86 +35,9 @@ jobs:
    on:
      all_branches: true
  - provider: releases
          token: $GITHUB_TOKEN
    api_key: $GITHUB_TOKEN
    skip_cleanup: true
    file_glob: true
    file: travis-release-upload/*
    on:
      tags: true
    - <<: *release-artifacts
      name: "Windows release artifacts"
      os: windows
    #  Linux release artifacts are still built by ci/buildkite-secondary.yml
    #- <<: *release-artifacts
    #  name: "Linux release artifacts"
    #  os: linux
    #  before_install:
    #    - sudo apt-get install libssl-dev libudev-dev

    # explorer pull request
    - name: "explorer"
      if: type = pull_request AND branch = master

      language: node_js
      node_js:
        - "node"

      cache:
        directories:
          - ~/.npm

      before_install:
        - .travis/affects.sh explorer/ .travis || travis_terminate 0
        - cd explorer

      script:
        - npm run build
        - npm run format

    # web3.js pull request
    - name: "web3.js"
      if: type = pull_request AND branch = master

      language: node_js
      node_js:
        - "lts/*"

      services:
        - docker

      cache:
        directories:
          - ~/.npm

      before_install:
        - .travis/affects.sh web3.js/ .travis || travis_terminate 0
        - cd web3.js/
        - source .travis/before_install.sh

      script:
        - ../.travis/commitlint.sh
        - source .travis/script.sh

    # docs pull request
    - name: "docs"
      if: type IN (push, pull_request) OR tag IS present
      language: node_js
      node_js:
        - "node"

      services:
        - docker

      cache:
        directories:
          - ~/.npm

      before_install:
        - source ci/env.sh
        - .travis/channel_restriction.sh edge beta || travis_terminate 0
        - .travis/affects.sh docs/ .travis || travis_terminate 0
        - cd docs/
        - source .travis/before_install.sh

      script:
        - source .travis/script.sh

@@ -1,25 +0,0 @@
#!/usr/bin/env bash
#
# Check if files in the commit range match one or more prefixes
#

# Always run the job if we are on a tagged release
if [[ -n "$TRAVIS_TAG" ]]; then
  exit 0
fi

(
  set -x
  git diff --name-only "$TRAVIS_COMMIT_RANGE"
)

for file in $(git diff --name-only "$TRAVIS_COMMIT_RANGE"); do
  for prefix in "$@"; do
    if [[ $file =~ ^"$prefix" ]]; then
      exit 0
    fi
    done
done

echo "No modifications to $*"
exit 1
@@ -1,19 +0,0 @@
#!/usr/bin/env bash
#
# Only proceed if we are on one of the channels passed in, or a tag build
#

set -ex

[[ -n $CI_TAG ]] && exit 0

eval "$(ci/channel-info.sh)"

for acceptable_channel in "$@"; do
  if [[ "$CHANNEL" == "$acceptable_channel" ]]; then
    exit 0
  fi
done

echo "Not running from one of the following channels: $*"
exit 1
@@ -1,32 +0,0 @@
#!/usr/bin/env bash
#
# Runs commitlint in the provided subdirectory
#

set -e

basedir=$1
if [[ -z "$basedir" ]]; then
  basedir=.
fi

if [[ ! -d "$basedir" ]]; then
  echo "Error: not a directory: $basedir"
  exit 1
fi

if [[ ! -f "$basedir"/commitlint.config.js ]]; then
  echo "Error: No commitlint configuration found"
  exit 1
fi

if [[ -z $TRAVIS_COMMIT_RANGE ]]; then
  echo "Error: TRAVIS_COMMIT_RANGE not defined"
  exit 1
fi

cd "$basedir"
echo "Checking commits in TRAVIS_COMMIT_RANGE: $TRAVIS_COMMIT_RANGE"
while IFS= read -r line; do
  echo "$line" | npx commitlint
done < <(git log "$TRAVIS_COMMIT_RANGE" --format=%s -- .)
@@ -1,34 +0,0 @@
#!/usr/bin/env bash
#
# Exports a subdirectory into another github repository
#

set -e
if [[ -z $GITHUB_TOKEN ]]; then
  echo GITHUB_TOKEN not defined
  exit 1
fi

cd "$(dirname "$0")/.."

pip3 install git-filter-repo

declare subdir=$1
declare repo_name=$2

[[ -n "$subdir" ]] || {
  echo "Error: subdir not specified"
  exit 1
}
[[ -n "$repo_name" ]] || {
  echo "Error: repo_name not specified"
  exit 1
}

echo "Exporting $subdir"

set -x
rm -rf .github_export/"$repo_name"
git clone https://"$GITHUB_TOKEN"@github.com/solana-labs/"$repo_name" .github_export/"$repo_name"
git filter-repo --subdirectory-filter "$subdir" --target .github_export/"$repo_name"
git -C .github_export/"$repo_name" push https://"$GITHUB_TOKEN"@github.com/solana-labs/"$repo_name"
@@ -232,7 +232,7 @@ confused with 3-letter acronyms.
Solana's architecture is described by docs generated from markdown files in
the `docs/src/` directory, maintained by an *editor* (currently @garious). To
add a design proposal, you'll need to include it in the
[Accepted Design Proposals](https://docs.solana.com/proposals/accepted-design-proposals)
[Accepted Design Proposals](https://docs.solana.com/proposals)
section of the Solana docs.  Here's the full process:

1. Propose a design by creating a PR that adds a markdown document to the

Cargo.lock (3910 changes, generated)
File diff suppressed because it is too large.

Cargo.toml (19 changes)
@@ -5,21 +5,16 @@ members = [
    "bench-tps",
    "accounts-bench",
    "banking-bench",
    "banks-client",
    "banks-interface",
    "banks-server",
    "clap-utils",
    "cli-config",
    "cli-output",
    "client",
    "core",
    "dos",
    "download-utils",
    "faucet",
    "frozen-abi",
    "perf",
    "validator",
    "genesis",
    "genesis-programs",
    "gossip",
    "install",
    "keygen",
@@ -30,18 +25,15 @@ members = [
    "log-analyzer",
    "merkle-tree",
    "stake-o-matic",
    "storage-bigtable",
    "storage-proto",
    "streamer",
    "measure",
    "metrics",
    "net-shaper",
    "notifier",
    "poh-bench",
    "program-test",
    "programs/secp256k1",
    "programs/bpf_loader",
    "programs/budget",
    "programs/btc_spv",
    "programs/btc_spv_bin",
    "programs/config",
    "programs/exchange",
    "programs/failure",
@@ -54,15 +46,12 @@ members = [
    "ramp-tps",
    "runtime",
    "sdk",
    "sdk/cargo-build-bpf",
    "sdk/cargo-test-bpf",
    "scripts",
    "stake-accounts",
    "stake-monitor",
    "sys-tuner",
    "tokens",
    "transaction-status",
    "account-decoder",
    "upload-perf",
    "net-utils",
    "version",
@@ -74,4 +63,6 @@ members = [

exclude = [
    "programs/bpf",
    "programs/move_loader",
    "programs/librapay",
]

LICENSE (2 changes)
@@ -1,4 +1,4 @@
Copyright 2020 Solana Foundation.
Copyright 2018 Solana Labs, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

@@ -19,7 +19,7 @@ $ source $HOME/.cargo/env
$ rustup component add rustfmt
```

Please sure you are always using the latest stable rust version by running:
If your rustc version is lower than 1.39.0, please update it:

```bash
$ rustup update
@@ -59,11 +59,10 @@ $ cargo test
```

### Starting a local testnet
Start your own testnet locally, instructions are in the [online docs](https://docs.solana.com/cluster/bench-tps).
Start your own testnet locally, instructions are in the [online docs](https://docs.solana.com/bench-tps).

### Accessing the remote development cluster
* `devnet` - stable public cluster for development accessible via
devnet.solana.com. Runs 24/7. Learn more about the [public clusters](https://docs.solana.com/clusters)
### Accessing the remote testnet
* `testnet` - public stable testnet accessible via devnet.solana.com. Runs 24/7

# Benchmarking

RELEASE.md (107 changes)
@@ -76,20 +76,21 @@ There are three release channels that map to branches as follows:
    git push -u origin <branchname>
    ```

Alternatively use the Github UI.

### Update master branch to the next release minor version
### Update master branch with the next version

1. After the new branch has been created and pushed, update the Cargo.toml files on **master** to the next semantic version (e.g. 0.9.0 -> 0.10.0) with:
     ```
     $ scripts/increment-cargo-version.sh minor
     $ ./scripts/cargo-for-all-lock-files.sh update
     scripts/increment-cargo-version.sh minor
     ```
1. Rebuild to get an updated version of `Cargo.lock`:
    ```
    cargo build
    ```
1. Push all the changed Cargo.toml and Cargo.lock files to the `master` branch with something like:
    ```
    git co -b version_update
    git ls-files -m | xargs git add
    git commit -m 'Bump version to X.Y+1.0'
    git commit -m 'Update Cargo.toml versions from X.Y to X.Y+1'
    git push -u origin version_update
    ```
1. Confirm that your freshly cut release branch is shown as `BETA_CHANNEL` and the previous release branch as `STABLE_CHANNEL`:
@@ -101,56 +102,84 @@ Alternatively use the Github UI.

### Create the Release Tag on GitHub

1. Go to [GitHub Releases](https://github.com/solana-labs/solana/releases) for tagging a release.
1. Go to [GitHub's Releases UI](https://github.com/solana-labs/solana/releases) for tagging a release.
1. Click "Draft new release".  The release tag must exactly match the `version`
   field in `/Cargo.toml` prefixed by `v`.
   1.  If the Cargo.toml version field is **0.12.3**, then the release tag must be **v0.12.3**
   1.  If the Cargo.toml verion field is **0.12.3**, then the release tag must be **v0.12.3**
1. Make sure the Target Branch field matches the branch you want to make a release on.
   1.  If you want to release v0.12.0, the target branch must be v0.12
1. Fill the release notes.
1. If this is the first release on the branch (e.g. v0.13.**0**), paste in [this
   template](https://raw.githubusercontent.com/solana-labs/solana/master/.github/RELEASE_TEMPLATE.md).  Engineering Lead can provide summary contents for release notes if needed.
   1. If this is a patch release, review all the commits since the previous release on this branch and add details as needed.
1. Click "Save Draft", then confirm the release notes look good and the tag name and branch are correct.
1. Ensure all desired commits (usually backports) are landed on the branch by now.
1. Ensure the release is marked **"This is a pre-release"**.  This flag will need to be be removed manually after confirming the the Linux binary artifacts appear at a later step.
1. Go back into edit the release and click "Publish release" while being marked as a pre-release.
1. Confirm there is new git tag with intended version number at the intended revision after running `git fetch` locally.

1. Click "Save Draft", then confirm the release notes look good and the tag name and branch are correct.  Go back into edit the release and click "Publish release" when ready.

### Update release branch with the next patch version

1. After the new release has been tagged, update the Cargo.toml files on **release branch** to the next semantic version (e.g. 0.9.0 -> 0.9.1) with:
     ```
     $ scripts/increment-cargo-version.sh patch
     $ ./scripts/cargo-for-all-lock-files.sh tree
     scripts/increment-cargo-version.sh patch
     ```
1. Rebuild to get an updated version of `Cargo.lock`:
    ```
    cargo build
    ```
1. Push all the changed Cargo.toml and Cargo.lock files to the **release branch** with something like:
    ```
    git co -b version_update origin/vX.Y
    git add -u
    git commit -m 'Bump version to X.Y.Z+1'
    git push -u <user-remote> version_update
    git co -b version_update
    git ls-files -m | xargs git add
    git commit -m 'Update Cargo.toml versions from X.Y.Z to X.Y.Z+1'
    git push -u origin version_update
    ```
1. Open a PR against origin/vX.Y and then merge the PR after passing CI.

### Prepare for the next release
1.  Go to [GitHub Releases](https://github.com/solana-labs/solana/releases) and create a new draft release for `X.Y.Z+1` with empty release notes.  This allows people to incrementally add new release notes until it's time for the next release
    1. Also, point the branch field to the same branch and mark the relese as **"This is a pre-release"**.
1.  Go to the [Github Milestones](https://github.com/solana-labs/solana/milestones).  Create a new milestone for the `X.Y.Z+1`, move over
unresolved issues still in the `X.Y.Z` milestone, then close the `X.Y.Z` milestone.

### Verify release automation success
Go to [Solana Releases](https://github.com/solana-labs/solana/releases) and click on the latest release that you just published.
Verify that all of the build artifacts are present, then the uncheck **"This is a pre-release"** for the release.
1. Go to [Solana Releases](https://github.com/solana-labs/solana/releases) and click on the latest release that you just published.  Verify that all of the build artifacts are present.  This can take up to 90 minutes after creating the tag.
1. The `solana-secondary` Buildkite pipeline handles creating the binary tarballs and updated crates.  Look for a job under the tag name of the release: https://buildkite.com/solana-labs/solana-secondary
1. [Crates.io](https://crates.io/crates/solana) should have an updated Solana version.

Build artifacts can take up to 60 minutes after creating the tag before
appearing.  To check for progress:
* The `solana-secondary` Buildkite pipeline handles creating the Linux release artifacts and updated crates.  Look for a job under the tag name of the release: https://buildkite.com/solana-labs/solana-secondary.
* The macOS and Windows release artifacts are produced by Travis CI: https://travis-ci.com/github/solana-labs/solana/branches
### Update documentation
TODO: Documentation update procedure is WIP as we move to gitbook

[Crates.io](https://crates.io/crates/solana) should have an updated Solana version.  This can take 2-3 hours, and sometimes fails in the `solana-secondary` job.
If this happens and the error is non-fatal, click "Retry" on the "publish crate" job
Document the new recommended version by updating `docs/src/running-archiver.md` and `docs/src/validator-testnet.md` on the release (beta) branch to point at the `solana-install` for the upcoming release version.

### Update software on devnet.solana.com/testnet.solana.com/mainnet-beta.solana.com
See the documentation at https://github.com/solana-labs/cluster-ops/
### Update software on devnet.solana.com

The testnet running on devnet.solana.com is set to use a fixed release tag
which is set in the Buildkite testnet-management pipeline.
This tag needs to be updated and the testnet restarted after a new release
tag is created.

#### Update testnet schedules

Go to https://buildkite.com/solana-labs and click through: Pipelines ->
testnet-management -> Pipeline Settings -> Schedules
Or just click here:
https://buildkite.com/solana-labs/testnet-management/settings/schedules

There are two scheduled jobs for testnet: a daily restart and an hourly sanity-or-restart. \
https://buildkite.com/solana-labs/testnet-management/settings/schedules/0efd7856-7143-4713-8817-47e6bdb05387
https://buildkite.com/solana-labs/testnet-management/settings/schedules/2a926646-d972-42b5-aeb9-bb6759592a53

On each schedule:
1.  Set TESTNET_TAG environment variable to the desired release tag.
    1. Example, TESTNET_TAG=v0.13.2
1.  Set the Build Branch to the branch that TESTNET_TAG is from.
    1. Example: v0.13

#### Restart the testnet

Trigger a TESTNET_OP=create-and-start to refresh the cluster with the new version

1.  Go to https://buildkite.com/solana-labs/testnet-management
2.  Click "New Build" and use the following settings, then click "Create Build"
    1.  Commit: HEAD
    1.  Branch: [channel branch as set in the schedules]
    1.  Environment Variables:
```
TESTNET=testnet
TESTNET_TAG=[same value as used in TESTNET_TAG in the schedules]
TESTNET_OP=create-and-start
```

### Alert the community

Notify Discord users on #validator-support that a new release for
devnet.solana.com is available

@@ -1,30 +0,0 @@
[package]
name = "solana-account-decoder"
version = "1.4.18"
description = "Solana account decoder"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
homepage = "https://solana.com/"
license = "Apache-2.0"
edition = "2018"

[dependencies]
base64 = "0.12.3"
bincode = "1.3.1"
bs58 = "0.3.1"
bv = "0.11.1"
Inflector = "0.11.4"
lazy_static = "1.4.0"
serde = "1.0.112"
serde_derive = "1.0.103"
serde_json = "1.0.56"
solana-config-program = { path = "../programs/config", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
solana-stake-program = { path = "../programs/stake", version = "1.4.18" }
solana-vote-program = { path = "../programs/vote", version = "1.4.18" }
spl-token-v2-0 = { package = "spl-token", version = "=3.0.1", features = ["no-entrypoint"] }
thiserror = "1.0"
zstd = "0.5.1"

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -1,233 +0,0 @@
 | 
			
		||||
#[macro_use]
 | 
			
		||||
extern crate lazy_static;
 | 
			
		||||
#[macro_use]
 | 
			
		||||
extern crate serde_derive;
 | 
			
		||||
 | 
			
		||||
pub mod parse_account_data;
 | 
			
		||||
pub mod parse_config;
 | 
			
		||||
pub mod parse_nonce;
 | 
			
		||||
pub mod parse_stake;
 | 
			
		||||
pub mod parse_sysvar;
 | 
			
		||||
pub mod parse_token;
 | 
			
		||||
pub mod parse_vote;
 | 
			
		||||
pub mod validator_info;
 | 
			
		||||
 | 
			
		||||
use {
 | 
			
		||||
    crate::parse_account_data::{parse_account_data, AccountAdditionalData, ParsedAccount},
 | 
			
		||||
    solana_sdk::{account::Account, clock::Epoch, fee_calculator::FeeCalculator, pubkey::Pubkey},
 | 
			
		||||
    std::{
 | 
			
		||||
        io::{Read, Write},
 | 
			
		||||
        str::FromStr,
 | 
			
		||||
    },
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
pub type StringAmount = String;
 | 
			
		||||
 | 
			
		||||
/// A duplicate representation of an Account for pretty JSON serialization
 | 
			
		||||
#[derive(Serialize, Deserialize, Clone, Debug)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub struct UiAccount {
 | 
			
		||||
    pub lamports: u64,
 | 
			
		||||
    pub data: UiAccountData,
 | 
			
		||||
    pub owner: String,
 | 
			
		||||
    pub executable: bool,
 | 
			
		||||
    pub rent_epoch: Epoch,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
 | 
			
		||||
#[serde(rename_all = "camelCase", untagged)]
 | 
			
		||||
pub enum UiAccountData {
 | 
			
		||||
    LegacyBinary(String), // Legacy. Retained for RPC backwards compatibility
 | 
			
		||||
    Json(ParsedAccount),
 | 
			
		||||
    Binary(String, UiAccountEncoding),
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub enum UiAccountEncoding {
 | 
			
		||||
    Binary, // Legacy. Retained for RPC backwards compatibility
 | 
			
		||||
    Base58,
 | 
			
		||||
    Base64,
 | 
			
		||||
    JsonParsed,
 | 
			
		||||
    #[serde(rename = "base64+zstd")]
 | 
			
		||||
    Base64Zstd,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl UiAccount {
 | 
			
		||||
    pub fn encode(
 | 
			
		||||
        pubkey: &Pubkey,
 | 
			
		||||
        account: Account,
 | 
			
		||||
        encoding: UiAccountEncoding,
 | 
			
		||||
        additional_data: Option<AccountAdditionalData>,
 | 
			
		||||
        data_slice_config: Option<UiDataSliceConfig>,
 | 
			
		||||
    ) -> Self {
 | 
			
		||||
        let data = match encoding {
 | 
			
		||||
            UiAccountEncoding::Binary => UiAccountData::LegacyBinary(
 | 
			
		||||
                bs58::encode(slice_data(&account.data, data_slice_config)).into_string(),
 | 
			
		||||
            ),
 | 
			
		||||
            UiAccountEncoding::Base58 => UiAccountData::Binary(
 | 
			
		||||
                bs58::encode(slice_data(&account.data, data_slice_config)).into_string(),
 | 
			
		||||
                encoding,
 | 
			
		||||
            ),
 | 
			
		||||
            UiAccountEncoding::Base64 => UiAccountData::Binary(
 | 
			
		||||
                base64::encode(slice_data(&account.data, data_slice_config)),
 | 
			
		||||
                encoding,
 | 
			
		||||
            ),
 | 
			
		||||
            UiAccountEncoding::Base64Zstd => {
 | 
			
		||||
                let mut encoder = zstd::stream::write::Encoder::new(Vec::new(), 0).unwrap();
 | 
			
		||||
                match encoder
 | 
			
		||||
                    .write_all(slice_data(&account.data, data_slice_config))
 | 
			
		||||
                    .and_then(|()| encoder.finish())
 | 
			
		||||
                {
 | 
			
		||||
                    Ok(zstd_data) => UiAccountData::Binary(base64::encode(zstd_data), encoding),
 | 
			
		||||
                    Err(_) => UiAccountData::Binary(
 | 
			
		||||
                        base64::encode(slice_data(&account.data, data_slice_config)),
 | 
			
		||||
                        UiAccountEncoding::Base64,
 | 
			
		||||
                    ),
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
            UiAccountEncoding::JsonParsed => {
 | 
			
		||||
                if let Ok(parsed_data) =
 | 
			
		||||
                    parse_account_data(pubkey, &account.owner, &account.data, additional_data)
 | 
			
		||||
                {
 | 
			
		||||
                    UiAccountData::Json(parsed_data)
 | 
			
		||||
                } else {
 | 
			
		||||
                    UiAccountData::Binary(base64::encode(&account.data), UiAccountEncoding::Base64)
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        };
 | 
			
		||||
        UiAccount {
 | 
			
		||||
            lamports: account.lamports,
 | 
			
		||||
            data,
 | 
			
		||||
            owner: account.owner.to_string(),
 | 
			
		||||
            executable: account.executable,
 | 
			
		||||
            rent_epoch: account.rent_epoch,
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    pub fn decode(&self) -> Option<Account> {
 | 
			
		||||
        let data = match &self.data {
 | 
			
		||||
            UiAccountData::Json(_) => None,
 | 
			
		||||
            UiAccountData::LegacyBinary(blob) => bs58::decode(blob).into_vec().ok(),
 | 
			
		||||
            UiAccountData::Binary(blob, encoding) => match encoding {
 | 
			
		||||
                UiAccountEncoding::Base58 => bs58::decode(blob).into_vec().ok(),
 | 
			
		||||
                UiAccountEncoding::Base64 => base64::decode(blob).ok(),
 | 
			
		||||
                UiAccountEncoding::Base64Zstd => base64::decode(blob)
 | 
			
		||||
                    .ok()
 | 
			
		||||
                    .map(|zstd_data| {
 | 
			
		||||
                        let mut data = vec![];
 | 
			
		||||
                        zstd::stream::read::Decoder::new(zstd_data.as_slice())
 | 
			
		||||
                            .and_then(|mut reader| reader.read_to_end(&mut data))
 | 
			
		||||
                            .map(|_| data)
 | 
			
		||||
                            .ok()
 | 
			
		||||
                    })
 | 
			
		||||
                    .flatten(),
 | 
			
		||||
                UiAccountEncoding::Binary | UiAccountEncoding::JsonParsed => None,
 | 
			
		||||
            },
 | 
			
		||||
        }?;
 | 
			
		||||
        Some(Account {
 | 
			
		||||
            lamports: self.lamports,
 | 
			
		||||
            data,
 | 
			
		||||
            owner: Pubkey::from_str(&self.owner).ok()?,
 | 
			
		||||
            executable: self.executable,
 | 
			
		||||
            rent_epoch: self.rent_epoch,
 | 
			
		||||
        })
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub struct UiFeeCalculator {
 | 
			
		||||
    pub lamports_per_signature: StringAmount,
}

impl From<FeeCalculator> for UiFeeCalculator {
    fn from(fee_calculator: FeeCalculator) -> Self {
        Self {
            lamports_per_signature: fee_calculator.lamports_per_signature.to_string(),
        }
    }
}

impl Default for UiFeeCalculator {
    fn default() -> Self {
        Self {
            lamports_per_signature: "0".to_string(),
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UiDataSliceConfig {
    pub offset: usize,
    pub length: usize,
}

fn slice_data(data: &[u8], data_slice_config: Option<UiDataSliceConfig>) -> &[u8] {
    if let Some(UiDataSliceConfig { offset, length }) = data_slice_config {
        if offset >= data.len() {
            &[]
        } else if length > data.len() - offset {
            &data[offset..]
        } else {
            &data[offset..offset + length]
        }
    } else {
        data
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_slice_data() {
        let data = vec![1, 2, 3, 4, 5];
        let slice_config = Some(UiDataSliceConfig {
            offset: 0,
            length: 5,
        });
        assert_eq!(slice_data(&data, slice_config), &data[..]);

        let slice_config = Some(UiDataSliceConfig {
            offset: 0,
            length: 10,
        });
        assert_eq!(slice_data(&data, slice_config), &data[..]);

        let slice_config = Some(UiDataSliceConfig {
            offset: 1,
            length: 2,
        });
        assert_eq!(slice_data(&data, slice_config), &data[1..3]);

        let slice_config = Some(UiDataSliceConfig {
            offset: 10,
            length: 2,
        });
        assert_eq!(slice_data(&data, slice_config), &[] as &[u8]);
    }

    #[test]
    fn test_base64_zstd() {
        let encoded_account = UiAccount::encode(
            &Pubkey::default(),
            Account {
                data: vec![0; 1024],
                ..Account::default()
            },
            UiAccountEncoding::Base64Zstd,
            None,
            None,
        );
        assert!(matches!(
            encoded_account.data,
            UiAccountData::Binary(_, UiAccountEncoding::Base64Zstd)
        ));

        let decoded_account = encoded_account.decode().unwrap();
        assert_eq!(decoded_account.data, vec![0; 1024]);
    }
}
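For context, a minimal sketch of combining the data-slice config above with account encoding. The trailing argument order of UiAccount::encode (parsed-account data, then data slice) and the Base64 variant name are assumed from the test above; treat this as illustrative, not authoritative:

fn example_encode_with_data_slice() -> UiAccount {
    // Return only bytes [1..3) of the account data, base64-encoded.
    UiAccount::encode(
        &Pubkey::default(),
        Account { data: vec![1, 2, 3, 4, 5], ..Account::default() },
        UiAccountEncoding::Base64,                        // assumed variant name
        None,                                             // no parsed-account data
        Some(UiDataSliceConfig { offset: 1, length: 2 }), // slice applied before encoding
    )
}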
@@ -1,145 +0,0 @@
use crate::{
    parse_config::parse_config,
    parse_nonce::parse_nonce,
    parse_stake::parse_stake,
    parse_sysvar::parse_sysvar,
    parse_token::{parse_token, spl_token_id_v2_0},
    parse_vote::parse_vote,
};
use inflector::Inflector;
use serde_json::Value;
use solana_sdk::{instruction::InstructionError, pubkey::Pubkey, system_program, sysvar};
use std::collections::HashMap;
use thiserror::Error;

lazy_static! {
    static ref CONFIG_PROGRAM_ID: Pubkey = solana_config_program::id();
    static ref STAKE_PROGRAM_ID: Pubkey = solana_stake_program::id();
    static ref SYSTEM_PROGRAM_ID: Pubkey = system_program::id();
    static ref SYSVAR_PROGRAM_ID: Pubkey = sysvar::id();
    static ref TOKEN_PROGRAM_ID: Pubkey = spl_token_id_v2_0();
    static ref VOTE_PROGRAM_ID: Pubkey = solana_vote_program::id();
    pub static ref PARSABLE_PROGRAM_IDS: HashMap<Pubkey, ParsableAccount> = {
        let mut m = HashMap::new();
        m.insert(*CONFIG_PROGRAM_ID, ParsableAccount::Config);
        m.insert(*SYSTEM_PROGRAM_ID, ParsableAccount::Nonce);
        m.insert(*TOKEN_PROGRAM_ID, ParsableAccount::SplToken);
        m.insert(*STAKE_PROGRAM_ID, ParsableAccount::Stake);
        m.insert(*SYSVAR_PROGRAM_ID, ParsableAccount::Sysvar);
        m.insert(*VOTE_PROGRAM_ID, ParsableAccount::Vote);
        m
    };
}

#[derive(Error, Debug)]
pub enum ParseAccountError {
    #[error("{0:?} account not parsable")]
    AccountNotParsable(ParsableAccount),

    #[error("Program not parsable")]
    ProgramNotParsable,

    #[error("Additional data required to parse: {0}")]
    AdditionalDataMissing(String),

    #[error("Instruction error")]
    InstructionError(#[from] InstructionError),

    #[error("Serde json error")]
    SerdeJsonError(#[from] serde_json::error::Error),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct ParsedAccount {
    pub program: String,
    pub parsed: Value,
    pub space: u64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ParsableAccount {
    Config,
    Nonce,
    SplToken,
    Stake,
    Sysvar,
    Vote,
}

#[derive(Default)]
pub struct AccountAdditionalData {
    pub spl_token_decimals: Option<u8>,
}

pub fn parse_account_data(
    pubkey: &Pubkey,
    program_id: &Pubkey,
    data: &[u8],
    additional_data: Option<AccountAdditionalData>,
) -> Result<ParsedAccount, ParseAccountError> {
    let program_name = PARSABLE_PROGRAM_IDS
        .get(program_id)
        .ok_or_else(|| ParseAccountError::ProgramNotParsable)?;
    let additional_data = additional_data.unwrap_or_default();
    let parsed_json = match program_name {
        ParsableAccount::Config => serde_json::to_value(parse_config(data, pubkey)?)?,
        ParsableAccount::Nonce => serde_json::to_value(parse_nonce(data)?)?,
        ParsableAccount::SplToken => {
            serde_json::to_value(parse_token(data, additional_data.spl_token_decimals)?)?
        }
        ParsableAccount::Stake => serde_json::to_value(parse_stake(data)?)?,
        ParsableAccount::Sysvar => serde_json::to_value(parse_sysvar(data, pubkey)?)?,
        ParsableAccount::Vote => serde_json::to_value(parse_vote(data)?)?,
    };
    Ok(ParsedAccount {
        program: format!("{:?}", program_name).to_kebab_case(),
        parsed: parsed_json,
        space: data.len() as u64,
    })
}

#[cfg(test)]
mod test {
    use super::*;
    use solana_sdk::nonce::{
        state::{Data, Versions},
        State,
    };
    use solana_vote_program::vote_state::{VoteState, VoteStateVersions};

    #[test]
    fn test_parse_account_data() {
        let account_pubkey = solana_sdk::pubkey::new_rand();
        let other_program = solana_sdk::pubkey::new_rand();
        let data = vec![0; 4];
        assert!(parse_account_data(&account_pubkey, &other_program, &data, None).is_err());

        let vote_state = VoteState::default();
        let mut vote_account_data: Vec<u8> = vec![0; VoteState::size_of()];
        let versioned = VoteStateVersions::Current(Box::new(vote_state));
        VoteState::serialize(&versioned, &mut vote_account_data).unwrap();
        let parsed = parse_account_data(
            &account_pubkey,
            &solana_vote_program::id(),
            &vote_account_data,
            None,
        )
        .unwrap();
        assert_eq!(parsed.program, "vote".to_string());
        assert_eq!(parsed.space, VoteState::size_of() as u64);

        let nonce_data = Versions::new_current(State::Initialized(Data::default()));
        let nonce_account_data = bincode::serialize(&nonce_data).unwrap();
        let parsed = parse_account_data(
            &account_pubkey,
            &system_program::id(),
            &nonce_account_data,
            None,
        )
        .unwrap();
        assert_eq!(parsed.program, "nonce".to_string());
        assert_eq!(parsed.space, State::size() as u64);
    }
}
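As a usage sketch of parse_account_data for an SPL token account: the mint's decimals must be supplied via AccountAdditionalData, and the reported program name is the kebab-cased variant name. The function name, the token_account_data bytes, and the decimal count 6 below are hypothetical; the API names come from the code above.

fn example_parse_spl_token(
    account_pubkey: &Pubkey,
    token_account_data: &[u8], // hypothetical raw data owned by the SPL token program
) -> Result<(), ParseAccountError> {
    let parsed = parse_account_data(
        account_pubkey,
        &spl_token_id_v2_0(),
        token_account_data,
        Some(AccountAdditionalData { spl_token_decimals: Some(6) }),
    )?;
    assert_eq!(parsed.program, "spl-token"); // ParsableAccount::SplToken, kebab-cased
    Ok(())
}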
@@ -1,146 +0,0 @@
use crate::{
    parse_account_data::{ParsableAccount, ParseAccountError},
    validator_info,
};
use bincode::deserialize;
use serde_json::Value;
use solana_config_program::{get_config_data, ConfigKeys};
use solana_sdk::pubkey::Pubkey;
use solana_stake_program::config::Config as StakeConfig;

pub fn parse_config(data: &[u8], pubkey: &Pubkey) -> Result<ConfigAccountType, ParseAccountError> {
    let parsed_account = if pubkey == &solana_stake_program::config::id() {
        get_config_data(data)
            .ok()
            .and_then(|data| deserialize::<StakeConfig>(data).ok())
            .map(|config| ConfigAccountType::StakeConfig(config.into()))
    } else {
        deserialize::<ConfigKeys>(data).ok().and_then(|key_list| {
            if !key_list.keys.is_empty() && key_list.keys[0].0 == validator_info::id() {
                parse_config_data::<String>(data, key_list.keys).and_then(|validator_info| {
                    Some(ConfigAccountType::ValidatorInfo(UiConfig {
                        keys: validator_info.keys,
                        config_data: serde_json::from_str(&validator_info.config_data).ok()?,
                    }))
                })
            } else {
                None
            }
        })
    };
    parsed_account.ok_or(ParseAccountError::AccountNotParsable(
        ParsableAccount::Config,
    ))
}

fn parse_config_data<T>(data: &[u8], keys: Vec<(Pubkey, bool)>) -> Option<UiConfig<T>>
where
    T: serde::de::DeserializeOwned,
{
    let config_data: T = deserialize(&get_config_data(data).ok()?).ok()?;
    let keys = keys
        .iter()
        .map(|key| UiConfigKey {
            pubkey: key.0.to_string(),
            signer: key.1,
        })
        .collect();
    Some(UiConfig { keys, config_data })
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase", tag = "type", content = "info")]
pub enum ConfigAccountType {
    StakeConfig(UiStakeConfig),
    ValidatorInfo(UiConfig<Value>),
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiConfigKey {
    pub pubkey: String,
    pub signer: bool,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiStakeConfig {
    pub warmup_cooldown_rate: f64,
    pub slash_penalty: u8,
}

impl From<StakeConfig> for UiStakeConfig {
    fn from(config: StakeConfig) -> Self {
        Self {
            warmup_cooldown_rate: config.warmup_cooldown_rate,
            slash_penalty: config.slash_penalty,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiConfig<T> {
    pub keys: Vec<UiConfigKey>,
    pub config_data: T,
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::validator_info::ValidatorInfo;
    use serde_json::json;
    use solana_config_program::create_config_account;

    #[test]
    fn test_parse_config() {
        let stake_config = StakeConfig {
            warmup_cooldown_rate: 0.25,
            slash_penalty: 50,
        };
        let stake_config_account = create_config_account(vec![], &stake_config, 10);
        assert_eq!(
            parse_config(
                &stake_config_account.data,
                &solana_stake_program::config::id()
            )
            .unwrap(),
            ConfigAccountType::StakeConfig(UiStakeConfig {
                warmup_cooldown_rate: 0.25,
                slash_penalty: 50,
            }),
        );

        let validator_info = ValidatorInfo {
            info: serde_json::to_string(&json!({
                "name": "Solana",
            }))
            .unwrap(),
        };
        let info_pubkey = solana_sdk::pubkey::new_rand();
        let validator_info_config_account = create_config_account(
            vec![(validator_info::id(), false), (info_pubkey, true)],
            &validator_info,
            10,
        );
        assert_eq!(
            parse_config(&validator_info_config_account.data, &info_pubkey).unwrap(),
            ConfigAccountType::ValidatorInfo(UiConfig {
                keys: vec![
                    UiConfigKey {
                        pubkey: validator_info::id().to_string(),
                        signer: false,
                    },
                    UiConfigKey {
                        pubkey: info_pubkey.to_string(),
                        signer: true,
                    }
                ],
                config_data: serde_json::from_str(r#"{"name":"Solana"}"#).unwrap(),
            }),
        );

        let bad_data = vec![0; 4];
        assert!(parse_config(&bad_data, &info_pubkey).is_err());
    }
}
@@ -1,67 +0,0 @@
use crate::{parse_account_data::ParseAccountError, UiFeeCalculator};
use solana_sdk::{
    instruction::InstructionError,
    nonce::{state::Versions, State},
};

pub fn parse_nonce(data: &[u8]) -> Result<UiNonceState, ParseAccountError> {
    let nonce_state: Versions = bincode::deserialize(data)
        .map_err(|_| ParseAccountError::from(InstructionError::InvalidAccountData))?;
    let nonce_state = nonce_state.convert_to_current();
    match nonce_state {
        State::Uninitialized => Ok(UiNonceState::Uninitialized),
        State::Initialized(data) => Ok(UiNonceState::Initialized(UiNonceData {
            authority: data.authority.to_string(),
            blockhash: data.blockhash.to_string(),
            fee_calculator: data.fee_calculator.into(),
        })),
    }
}

/// A duplicate representation of NonceState for pretty JSON serialization
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase", tag = "type", content = "info")]
pub enum UiNonceState {
    Uninitialized,
    Initialized(UiNonceData),
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiNonceData {
    pub authority: String,
    pub blockhash: String,
    pub fee_calculator: UiFeeCalculator,
}

#[cfg(test)]
mod test {
    use super::*;
    use solana_sdk::{
        hash::Hash,
        nonce::{
            state::{Data, Versions},
            State,
        },
        pubkey::Pubkey,
    };

    #[test]
    fn test_parse_nonce() {
        let nonce_data = Versions::new_current(State::Initialized(Data::default()));
        let nonce_account_data = bincode::serialize(&nonce_data).unwrap();
        assert_eq!(
            parse_nonce(&nonce_account_data).unwrap(),
            UiNonceState::Initialized(UiNonceData {
                authority: Pubkey::default().to_string(),
                blockhash: Hash::default().to_string(),
                fee_calculator: UiFeeCalculator {
                    lamports_per_signature: 0.to_string(),
                },
            }),
        );

        let bad_data = vec![0; 4];
        assert!(parse_nonce(&bad_data).is_err());
    }
}
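Because UiNonceState is adjacently tagged (tag = "type", content = "info") with camelCase renaming, its JSON shape looks roughly like the sketch below; the function name is hypothetical and the assertion only illustrates the expected serde output.

fn example_nonce_json_shape() {
    // An uninitialized nonce serializes with a "type" tag and no "info" payload.
    assert_eq!(
        serde_json::to_value(&UiNonceState::Uninitialized).unwrap(),
        serde_json::json!({ "type": "uninitialized" })
    );
}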
@@ -1,234 +0,0 @@
use crate::{
    parse_account_data::{ParsableAccount, ParseAccountError},
    StringAmount,
};
use bincode::deserialize;
use solana_sdk::clock::{Epoch, UnixTimestamp};
use solana_stake_program::stake_state::{Authorized, Delegation, Lockup, Meta, Stake, StakeState};

pub fn parse_stake(data: &[u8]) -> Result<StakeAccountType, ParseAccountError> {
    let stake_state: StakeState = deserialize(data)
        .map_err(|_| ParseAccountError::AccountNotParsable(ParsableAccount::Stake))?;
    let parsed_account = match stake_state {
        StakeState::Uninitialized => StakeAccountType::Uninitialized,
        StakeState::Initialized(meta) => StakeAccountType::Initialized(UiStakeAccount {
            meta: meta.into(),
            stake: None,
        }),
        StakeState::Stake(meta, stake) => StakeAccountType::Delegated(UiStakeAccount {
            meta: meta.into(),
            stake: Some(stake.into()),
        }),
        StakeState::RewardsPool => StakeAccountType::RewardsPool,
    };
    Ok(parsed_account)
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase", tag = "type", content = "info")]
pub enum StakeAccountType {
    Uninitialized,
    Initialized(UiStakeAccount),
    Delegated(UiStakeAccount),
    RewardsPool,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiStakeAccount {
    pub meta: UiMeta,
    pub stake: Option<UiStake>,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiMeta {
    pub rent_exempt_reserve: StringAmount,
    pub authorized: UiAuthorized,
    pub lockup: UiLockup,
}

impl From<Meta> for UiMeta {
    fn from(meta: Meta) -> Self {
        Self {
            rent_exempt_reserve: meta.rent_exempt_reserve.to_string(),
            authorized: meta.authorized.into(),
            lockup: meta.lockup.into(),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiLockup {
    pub unix_timestamp: UnixTimestamp,
    pub epoch: Epoch,
    pub custodian: String,
}

impl From<Lockup> for UiLockup {
    fn from(lockup: Lockup) -> Self {
        Self {
            unix_timestamp: lockup.unix_timestamp,
            epoch: lockup.epoch,
            custodian: lockup.custodian.to_string(),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiAuthorized {
    pub staker: String,
    pub withdrawer: String,
}

impl From<Authorized> for UiAuthorized {
    fn from(authorized: Authorized) -> Self {
        Self {
            staker: authorized.staker.to_string(),
            withdrawer: authorized.withdrawer.to_string(),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiStake {
    pub delegation: UiDelegation,
    pub credits_observed: u64,
}

impl From<Stake> for UiStake {
    fn from(stake: Stake) -> Self {
        Self {
            delegation: stake.delegation.into(),
            credits_observed: stake.credits_observed,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiDelegation {
    pub voter: String,
    pub stake: StringAmount,
    pub activation_epoch: StringAmount,
    pub deactivation_epoch: StringAmount,
    pub warmup_cooldown_rate: f64,
}

impl From<Delegation> for UiDelegation {
    fn from(delegation: Delegation) -> Self {
        Self {
            voter: delegation.voter_pubkey.to_string(),
            stake: delegation.stake.to_string(),
            activation_epoch: delegation.activation_epoch.to_string(),
            deactivation_epoch: delegation.deactivation_epoch.to_string(),
            warmup_cooldown_rate: delegation.warmup_cooldown_rate,
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use bincode::serialize;

    #[test]
    fn test_parse_stake() {
        let stake_state = StakeState::Uninitialized;
        let stake_data = serialize(&stake_state).unwrap();
        assert_eq!(
            parse_stake(&stake_data).unwrap(),
            StakeAccountType::Uninitialized
        );

        let pubkey = solana_sdk::pubkey::new_rand();
        let custodian = solana_sdk::pubkey::new_rand();
        let authorized = Authorized::auto(&pubkey);
        let lockup = Lockup {
            unix_timestamp: 0,
            epoch: 1,
            custodian,
        };
        let meta = Meta {
            rent_exempt_reserve: 42,
            authorized,
            lockup,
        };

        let stake_state = StakeState::Initialized(meta);
        let stake_data = serialize(&stake_state).unwrap();
        assert_eq!(
            parse_stake(&stake_data).unwrap(),
            StakeAccountType::Initialized(UiStakeAccount {
                meta: UiMeta {
                    rent_exempt_reserve: 42.to_string(),
                    authorized: UiAuthorized {
                        staker: pubkey.to_string(),
                        withdrawer: pubkey.to_string(),
                    },
                    lockup: UiLockup {
                        unix_timestamp: 0,
                        epoch: 1,
                        custodian: custodian.to_string(),
                    }
                },
                stake: None,
            })
        );

        let voter_pubkey = solana_sdk::pubkey::new_rand();
        let stake = Stake {
            delegation: Delegation {
                voter_pubkey,
                stake: 20,
                activation_epoch: 2,
                deactivation_epoch: std::u64::MAX,
                warmup_cooldown_rate: 0.25,
            },
            credits_observed: 10,
        };

        let stake_state = StakeState::Stake(meta, stake);
        let stake_data = serialize(&stake_state).unwrap();
        assert_eq!(
            parse_stake(&stake_data).unwrap(),
            StakeAccountType::Delegated(UiStakeAccount {
                meta: UiMeta {
                    rent_exempt_reserve: 42.to_string(),
                    authorized: UiAuthorized {
                        staker: pubkey.to_string(),
                        withdrawer: pubkey.to_string(),
                    },
                    lockup: UiLockup {
                        unix_timestamp: 0,
                        epoch: 1,
                        custodian: custodian.to_string(),
                    }
                },
                stake: Some(UiStake {
                    delegation: UiDelegation {
                        voter: voter_pubkey.to_string(),
                        stake: 20.to_string(),
                        activation_epoch: 2.to_string(),
                        deactivation_epoch: std::u64::MAX.to_string(),
                        warmup_cooldown_rate: 0.25,
                    },
                    credits_observed: 10,
                })
            })
        );

        let stake_state = StakeState::RewardsPool;
        let stake_data = serialize(&stake_state).unwrap();
        assert_eq!(
            parse_stake(&stake_data).unwrap(),
            StakeAccountType::RewardsPool
        );

        let bad_data = vec![1, 2, 3, 4];
        assert!(parse_stake(&bad_data).is_err());
    }
}
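A brief sketch of consuming parse_stake output; stake_account_data and the function name are hypothetical, while the variant and field names come from the code above:

fn example_report_delegation(stake_account_data: &[u8]) -> Result<(), ParseAccountError> {
    match parse_stake(stake_account_data)? {
        StakeAccountType::Delegated(ui_stake_account) => {
            if let Some(stake) = ui_stake_account.stake {
                // UiDelegation stores the amount as a string (StringAmount).
                println!("voter {}: {} lamports", stake.delegation.voter, stake.delegation.stake);
            }
        }
        other => println!("not delegated: {:?}", other),
    }
    Ok(())
}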
@@ -1,329 +0,0 @@
use crate::{
    parse_account_data::{ParsableAccount, ParseAccountError},
    StringAmount, UiFeeCalculator,
};
use bincode::deserialize;
use bv::BitVec;
use solana_sdk::{
    clock::{Clock, Epoch, Slot, UnixTimestamp},
    epoch_schedule::EpochSchedule,
    pubkey::Pubkey,
    rent::Rent,
    slot_hashes::SlotHashes,
    slot_history::{self, SlotHistory},
    stake_history::{StakeHistory, StakeHistoryEntry},
    sysvar::{self, fees::Fees, recent_blockhashes::RecentBlockhashes, rewards::Rewards},
};

pub fn parse_sysvar(data: &[u8], pubkey: &Pubkey) -> Result<SysvarAccountType, ParseAccountError> {
    let parsed_account = {
        if pubkey == &sysvar::clock::id() {
            deserialize::<Clock>(data)
                .ok()
                .map(|clock| SysvarAccountType::Clock(clock.into()))
        } else if pubkey == &sysvar::epoch_schedule::id() {
            deserialize(data).ok().map(SysvarAccountType::EpochSchedule)
        } else if pubkey == &sysvar::fees::id() {
            deserialize::<Fees>(data)
                .ok()
                .map(|fees| SysvarAccountType::Fees(fees.into()))
        } else if pubkey == &sysvar::recent_blockhashes::id() {
            deserialize::<RecentBlockhashes>(data)
                .ok()
                .map(|recent_blockhashes| {
                    let recent_blockhashes = recent_blockhashes
                        .iter()
                        .map(|entry| UiRecentBlockhashesEntry {
                            blockhash: entry.blockhash.to_string(),
                            fee_calculator: entry.fee_calculator.clone().into(),
                        })
                        .collect();
                    SysvarAccountType::RecentBlockhashes(recent_blockhashes)
                })
        } else if pubkey == &sysvar::rent::id() {
            deserialize::<Rent>(data)
                .ok()
                .map(|rent| SysvarAccountType::Rent(rent.into()))
        } else if pubkey == &sysvar::rewards::id() {
            deserialize::<Rewards>(data)
                .ok()
                .map(|rewards| SysvarAccountType::Rewards(rewards.into()))
        } else if pubkey == &sysvar::slot_hashes::id() {
            deserialize::<SlotHashes>(data).ok().map(|slot_hashes| {
                let slot_hashes = slot_hashes
                    .iter()
                    .map(|slot_hash| UiSlotHashEntry {
                        slot: slot_hash.0,
                        hash: slot_hash.1.to_string(),
                    })
                    .collect();
                SysvarAccountType::SlotHashes(slot_hashes)
            })
        } else if pubkey == &sysvar::slot_history::id() {
            deserialize::<SlotHistory>(data).ok().map(|slot_history| {
                SysvarAccountType::SlotHistory(UiSlotHistory {
                    next_slot: slot_history.next_slot,
                    bits: format!("{:?}", SlotHistoryBits(slot_history.bits)),
                })
            })
        } else if pubkey == &sysvar::stake_history::id() {
            deserialize::<StakeHistory>(data).ok().map(|stake_history| {
                let stake_history = stake_history
                    .iter()
                    .map(|entry| UiStakeHistoryEntry {
                        epoch: entry.0,
                        stake_history: entry.1.clone(),
                    })
                    .collect();
                SysvarAccountType::StakeHistory(stake_history)
            })
        } else {
            None
        }
    };
    parsed_account.ok_or(ParseAccountError::AccountNotParsable(
        ParsableAccount::Sysvar,
    ))
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase", tag = "type", content = "info")]
pub enum SysvarAccountType {
    Clock(UiClock),
    EpochSchedule(EpochSchedule),
    Fees(UiFees),
    RecentBlockhashes(Vec<UiRecentBlockhashesEntry>),
    Rent(UiRent),
    Rewards(UiRewards),
    SlotHashes(Vec<UiSlotHashEntry>),
    SlotHistory(UiSlotHistory),
    StakeHistory(Vec<UiStakeHistoryEntry>),
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "camelCase")]
pub struct UiClock {
    pub slot: Slot,
    pub epoch: Epoch,
    pub epoch_start_timestamp: UnixTimestamp,
    pub leader_schedule_epoch: Epoch,
    pub unix_timestamp: UnixTimestamp,
}

impl From<Clock> for UiClock {
    fn from(clock: Clock) -> Self {
        Self {
            slot: clock.slot,
            epoch: clock.epoch,
            epoch_start_timestamp: clock.epoch_start_timestamp,
            leader_schedule_epoch: clock.leader_schedule_epoch,
            unix_timestamp: clock.unix_timestamp,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "camelCase")]
pub struct UiFees {
    pub fee_calculator: UiFeeCalculator,
}
impl From<Fees> for UiFees {
    fn from(fees: Fees) -> Self {
        Self {
            fee_calculator: fees.fee_calculator.into(),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "camelCase")]
pub struct UiRent {
    pub lamports_per_byte_year: StringAmount,
    pub exemption_threshold: f64,
    pub burn_percent: u8,
}

impl From<Rent> for UiRent {
    fn from(rent: Rent) -> Self {
        Self {
            lamports_per_byte_year: rent.lamports_per_byte_year.to_string(),
            exemption_threshold: rent.exemption_threshold,
            burn_percent: rent.burn_percent,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq, Default)]
#[serde(rename_all = "camelCase")]
pub struct UiRewards {
    pub validator_point_value: f64,
}

impl From<Rewards> for UiRewards {
    fn from(rewards: Rewards) -> Self {
        Self {
            validator_point_value: rewards.validator_point_value,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiRecentBlockhashesEntry {
    pub blockhash: String,
    pub fee_calculator: UiFeeCalculator,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiSlotHashEntry {
    pub slot: Slot,
    pub hash: String,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiSlotHistory {
    pub next_slot: Slot,
    pub bits: String,
}

struct SlotHistoryBits(BitVec<u64>);

impl std::fmt::Debug for SlotHistoryBits {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for i in 0..slot_history::MAX_ENTRIES {
            if self.0.get(i) {
                write!(f, "1")?;
            } else {
                write!(f, "0")?;
            }
        }
        Ok(())
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiStakeHistoryEntry {
    pub epoch: Epoch,
    pub stake_history: StakeHistoryEntry,
}

#[cfg(test)]
mod test {
    use super::*;
    use solana_sdk::{
        account::create_account, fee_calculator::FeeCalculator, hash::Hash,
        sysvar::recent_blockhashes::IterItem,
    };
    use std::iter::FromIterator;

    #[test]
    fn test_parse_sysvars() {
        let clock_sysvar = create_account(&Clock::default(), 1);
        assert_eq!(
            parse_sysvar(&clock_sysvar.data, &sysvar::clock::id()).unwrap(),
            SysvarAccountType::Clock(UiClock::default()),
        );

        let epoch_schedule = EpochSchedule {
            slots_per_epoch: 12,
            leader_schedule_slot_offset: 0,
            warmup: false,
            first_normal_epoch: 1,
            first_normal_slot: 12,
        };
        let epoch_schedule_sysvar = create_account(&epoch_schedule, 1);
        assert_eq!(
            parse_sysvar(&epoch_schedule_sysvar.data, &sysvar::epoch_schedule::id()).unwrap(),
            SysvarAccountType::EpochSchedule(epoch_schedule),
        );

        let fees_sysvar = create_account(&Fees::default(), 1);
        assert_eq!(
            parse_sysvar(&fees_sysvar.data, &sysvar::fees::id()).unwrap(),
            SysvarAccountType::Fees(UiFees::default()),
        );

        let hash = Hash::new(&[1; 32]);
        let fee_calculator = FeeCalculator {
            lamports_per_signature: 10,
        };
        let recent_blockhashes =
            RecentBlockhashes::from_iter(vec![IterItem(0, &hash, &fee_calculator)].into_iter());
        let recent_blockhashes_sysvar = create_account(&recent_blockhashes, 1);
        assert_eq!(
            parse_sysvar(
                &recent_blockhashes_sysvar.data,
                &sysvar::recent_blockhashes::id()
            )
            .unwrap(),
            SysvarAccountType::RecentBlockhashes(vec![UiRecentBlockhashesEntry {
                blockhash: hash.to_string(),
                fee_calculator: fee_calculator.into(),
            }]),
        );

        let rent = Rent {
            lamports_per_byte_year: 10,
            exemption_threshold: 2.0,
            burn_percent: 5,
        };
        let rent_sysvar = create_account(&rent, 1);
        assert_eq!(
            parse_sysvar(&rent_sysvar.data, &sysvar::rent::id()).unwrap(),
            SysvarAccountType::Rent(rent.into()),
        );

        let rewards_sysvar = create_account(&Rewards::default(), 1);
        assert_eq!(
            parse_sysvar(&rewards_sysvar.data, &sysvar::rewards::id()).unwrap(),
            SysvarAccountType::Rewards(UiRewards::default()),
        );

        let mut slot_hashes = SlotHashes::default();
        slot_hashes.add(1, hash);
        let slot_hashes_sysvar = create_account(&slot_hashes, 1);
        assert_eq!(
            parse_sysvar(&slot_hashes_sysvar.data, &sysvar::slot_hashes::id()).unwrap(),
            SysvarAccountType::SlotHashes(vec![UiSlotHashEntry {
                slot: 1,
                hash: hash.to_string(),
            }]),
        );

        let mut slot_history = SlotHistory::default();
        slot_history.add(42);
        let slot_history_sysvar = create_account(&slot_history, 1);
        assert_eq!(
            parse_sysvar(&slot_history_sysvar.data, &sysvar::slot_history::id()).unwrap(),
            SysvarAccountType::SlotHistory(UiSlotHistory {
                next_slot: slot_history.next_slot,
                bits: format!("{:?}", SlotHistoryBits(slot_history.bits)),
            }),
        );

        let mut stake_history = StakeHistory::default();
        let stake_history_entry = StakeHistoryEntry {
            effective: 10,
            activating: 2,
            deactivating: 3,
        };
        stake_history.add(1, stake_history_entry.clone());
        let stake_history_sysvar = create_account(&stake_history, 1);
        assert_eq!(
            parse_sysvar(&stake_history_sysvar.data, &sysvar::stake_history::id()).unwrap(),
            SysvarAccountType::StakeHistory(vec![UiStakeHistoryEntry {
                epoch: 1,
                stake_history: stake_history_entry,
            }]),
        );

        let bad_pubkey = solana_sdk::pubkey::new_rand();
        assert!(parse_sysvar(&stake_history_sysvar.data, &bad_pubkey).is_err());

        let bad_data = vec![0; 4];
        assert!(parse_sysvar(&bad_data, &sysvar::stake_history::id()).is_err());
    }
}
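As a usage sketch of parse_sysvar for the clock sysvar; clock_account_data and the function name are hypothetical, while parse_sysvar, SysvarAccountType, and the sysvar id helper match the code above:

fn example_parse_clock(clock_account_data: &[u8]) -> Result<(), ParseAccountError> {
    // Decode the clock sysvar account into its UI representation.
    if let SysvarAccountType::Clock(clock) = parse_sysvar(clock_account_data, &sysvar::clock::id())? {
        println!("slot {} at unix_timestamp {}", clock.slot, clock.unix_timestamp);
    }
    Ok(())
}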
@@ -1,352 +0,0 @@
 | 
			
		||||
use crate::{
 | 
			
		||||
    parse_account_data::{ParsableAccount, ParseAccountError},
 | 
			
		||||
    StringAmount,
 | 
			
		||||
};
 | 
			
		||||
use solana_sdk::pubkey::Pubkey;
 | 
			
		||||
use spl_token_v2_0::{
 | 
			
		||||
    solana_program::{
 | 
			
		||||
        program_option::COption, program_pack::Pack, pubkey::Pubkey as SplTokenPubkey,
 | 
			
		||||
    },
 | 
			
		||||
    state::{Account, AccountState, Mint, Multisig},
 | 
			
		||||
};
 | 
			
		||||
use std::str::FromStr;
 | 
			
		||||
 | 
			
		||||
// A helper function to convert spl_token_v2_0::id() as spl_sdk::pubkey::Pubkey to
 | 
			
		||||
// solana_sdk::pubkey::Pubkey
 | 
			
		||||
pub fn spl_token_id_v2_0() -> Pubkey {
 | 
			
		||||
    Pubkey::from_str(&spl_token_v2_0::id().to_string()).unwrap()
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// A helper function to convert spl_token_v2_0::native_mint::id() as spl_sdk::pubkey::Pubkey to
 | 
			
		||||
// solana_sdk::pubkey::Pubkey
 | 
			
		||||
pub fn spl_token_v2_0_native_mint() -> Pubkey {
 | 
			
		||||
    Pubkey::from_str(&spl_token_v2_0::native_mint::id().to_string()).unwrap()
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// A helper function to convert a solana_sdk::pubkey::Pubkey to spl_sdk::pubkey::Pubkey
 | 
			
		||||
pub fn spl_token_v2_0_pubkey(pubkey: &Pubkey) -> SplTokenPubkey {
 | 
			
		||||
    SplTokenPubkey::from_str(&pubkey.to_string()).unwrap()
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// A helper function to convert a spl_sdk::pubkey::Pubkey to solana_sdk::pubkey::Pubkey
 | 
			
		||||
pub fn pubkey_from_spl_token_v2_0(pubkey: &SplTokenPubkey) -> Pubkey {
 | 
			
		||||
    Pubkey::from_str(&pubkey.to_string()).unwrap()
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn parse_token(
 | 
			
		||||
    data: &[u8],
 | 
			
		||||
    mint_decimals: Option<u8>,
 | 
			
		||||
) -> Result<TokenAccountType, ParseAccountError> {
 | 
			
		||||
    if data.len() == Account::get_packed_len() {
 | 
			
		||||
        let account = Account::unpack(data)
 | 
			
		||||
            .map_err(|_| ParseAccountError::AccountNotParsable(ParsableAccount::SplToken))?;
 | 
			
		||||
        let decimals = mint_decimals.ok_or_else(|| {
 | 
			
		||||
            ParseAccountError::AdditionalDataMissing(
 | 
			
		||||
                "no mint_decimals provided to parse spl-token account".to_string(),
 | 
			
		||||
            )
 | 
			
		||||
        })?;
 | 
			
		||||
        Ok(TokenAccountType::Account(UiTokenAccount {
 | 
			
		||||
            mint: account.mint.to_string(),
 | 
			
		||||
            owner: account.owner.to_string(),
 | 
			
		||||
            token_amount: token_amount_to_ui_amount(account.amount, decimals),
 | 
			
		||||
            delegate: match account.delegate {
 | 
			
		||||
                COption::Some(pubkey) => Some(pubkey.to_string()),
 | 
			
		||||
                COption::None => None,
 | 
			
		||||
            },
 | 
			
		||||
            state: account.state.into(),
 | 
			
		||||
            is_native: account.is_native(),
 | 
			
		||||
            rent_exempt_reserve: match account.is_native {
 | 
			
		||||
                COption::Some(reserve) => Some(token_amount_to_ui_amount(reserve, decimals)),
 | 
			
		||||
                COption::None => None,
 | 
			
		||||
            },
 | 
			
		||||
            delegated_amount: if account.delegate.is_none() {
 | 
			
		||||
                None
 | 
			
		||||
            } else {
 | 
			
		||||
                Some(token_amount_to_ui_amount(
 | 
			
		||||
                    account.delegated_amount,
 | 
			
		||||
                    decimals,
 | 
			
		||||
                ))
 | 
			
		||||
            },
 | 
			
		||||
            close_authority: match account.close_authority {
 | 
			
		||||
                COption::Some(pubkey) => Some(pubkey.to_string()),
 | 
			
		||||
                COption::None => None,
 | 
			
		||||
            },
 | 
			
		||||
        }))
 | 
			
		||||
    } else if data.len() == Mint::get_packed_len() {
 | 
			
		||||
        let mint = Mint::unpack(data)
 | 
			
		||||
            .map_err(|_| ParseAccountError::AccountNotParsable(ParsableAccount::SplToken))?;
 | 
			
		||||
        Ok(TokenAccountType::Mint(UiMint {
 | 
			
		||||
            mint_authority: match mint.mint_authority {
 | 
			
		||||
                COption::Some(pubkey) => Some(pubkey.to_string()),
 | 
			
		||||
                COption::None => None,
 | 
			
		||||
            },
 | 
			
		||||
            supply: mint.supply.to_string(),
 | 
			
		||||
            decimals: mint.decimals,
 | 
			
		||||
            is_initialized: mint.is_initialized,
 | 
			
		||||
            freeze_authority: match mint.freeze_authority {
 | 
			
		||||
                COption::Some(pubkey) => Some(pubkey.to_string()),
 | 
			
		||||
                COption::None => None,
 | 
			
		||||
            },
 | 
			
		||||
        }))
 | 
			
		||||
    } else if data.len() == Multisig::get_packed_len() {
 | 
			
		||||
        let multisig = Multisig::unpack(data)
 | 
			
		||||
            .map_err(|_| ParseAccountError::AccountNotParsable(ParsableAccount::SplToken))?;
 | 
			
		||||
        Ok(TokenAccountType::Multisig(UiMultisig {
 | 
			
		||||
            num_required_signers: multisig.m,
 | 
			
		||||
            num_valid_signers: multisig.n,
 | 
			
		||||
            is_initialized: multisig.is_initialized,
 | 
			
		||||
            signers: multisig
 | 
			
		||||
                .signers
 | 
			
		||||
                .iter()
 | 
			
		||||
                .filter_map(|pubkey| {
 | 
			
		||||
                    if pubkey != &SplTokenPubkey::default() {
 | 
			
		||||
                        Some(pubkey.to_string())
 | 
			
		||||
                    } else {
 | 
			
		||||
                        None
 | 
			
		||||
                    }
 | 
			
		||||
                })
 | 
			
		||||
                .collect(),
 | 
			
		||||
        }))
 | 
			
		||||
    } else {
 | 
			
		||||
        Err(ParseAccountError::AccountNotParsable(
 | 
			
		||||
            ParsableAccount::SplToken,
 | 
			
		||||
        ))
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase", tag = "type", content = "info")]
 | 
			
		||||
pub enum TokenAccountType {
 | 
			
		||||
    Account(UiTokenAccount),
 | 
			
		||||
    Mint(UiMint),
 | 
			
		||||
    Multisig(UiMultisig),
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub struct UiTokenAccount {
 | 
			
		||||
    pub mint: String,
 | 
			
		||||
    pub owner: String,
 | 
			
		||||
    pub token_amount: UiTokenAmount,
 | 
			
		||||
    #[serde(skip_serializing_if = "Option::is_none")]
 | 
			
		||||
    pub delegate: Option<String>,
 | 
			
		||||
    pub state: UiAccountState,
 | 
			
		||||
    pub is_native: bool,
 | 
			
		||||
    #[serde(skip_serializing_if = "Option::is_none")]
 | 
			
		||||
    pub rent_exempt_reserve: Option<UiTokenAmount>,
 | 
			
		||||
    #[serde(skip_serializing_if = "Option::is_none")]
 | 
			
		||||
    pub delegated_amount: Option<UiTokenAmount>,
 | 
			
		||||
    #[serde(skip_serializing_if = "Option::is_none")]
 | 
			
		||||
    pub close_authority: Option<String>,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub enum UiAccountState {
 | 
			
		||||
    Uninitialized,
 | 
			
		||||
    Initialized,
 | 
			
		||||
    Frozen,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl From<AccountState> for UiAccountState {
 | 
			
		||||
    fn from(state: AccountState) -> Self {
 | 
			
		||||
        match state {
 | 
			
		||||
            AccountState::Uninitialized => UiAccountState::Uninitialized,
 | 
			
		||||
            AccountState::Initialized => UiAccountState::Initialized,
 | 
			
		||||
            AccountState::Frozen => UiAccountState::Frozen,
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub struct UiTokenAmount {
 | 
			
		||||
    pub ui_amount: f64,
 | 
			
		||||
    pub decimals: u8,
 | 
			
		||||
    pub amount: StringAmount,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl UiTokenAmount {
 | 
			
		||||
    pub fn real_number_string(&self) -> String {
 | 
			
		||||
        let decimals = self.decimals as usize;
 | 
			
		||||
        if decimals > 0 {
 | 
			
		||||
            let amount = u64::from_str(&self.amount).unwrap_or(0);
 | 
			
		||||
 | 
			
		||||
            // Left-pad zeros to decimals + 1, so we at least have an integer zero
 | 
			
		||||
            let mut s = format!("{:01$}", amount, decimals + 1);
 | 
			
		||||
 | 
			
		||||
            // Add the decimal point (Sorry, "," locales!)
 | 
			
		||||
            s.insert(s.len() - decimals, '.');
 | 
			
		||||
            s
 | 
			
		||||
        } else {
 | 
			
		||||
            self.amount.clone()
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    pub fn real_number_string_trimmed(&self) -> String {
 | 
			
		||||
        let s = self.real_number_string();
 | 
			
		||||
        let zeros_trimmed = s.trim_end_matches('0');
 | 
			
		||||
        let decimal_trimmed = zeros_trimmed.trim_end_matches('.');
 | 
			
		||||
        decimal_trimmed.to_string()
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn token_amount_to_ui_amount(amount: u64, decimals: u8) -> UiTokenAmount {
 | 
			
		||||
    // Use `amount_to_ui_amount()` once spl_token is bumped to a version that supports it: https://github.com/solana-labs/solana-program-library/pull/211
 | 
			
		||||
    let amount_decimals = amount as f64 / 10_usize.pow(decimals as u32) as f64;
 | 
			
		||||
    UiTokenAmount {
 | 
			
		||||
        ui_amount: amount_decimals,
 | 
			
		||||
        decimals,
 | 
			
		||||
        amount: amount.to_string(),
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub struct UiMint {
 | 
			
		||||
    pub mint_authority: Option<String>,
 | 
			
		||||
    pub supply: StringAmount,
 | 
			
		||||
    pub decimals: u8,
 | 
			
		||||
    pub is_initialized: bool,
 | 
			
		||||
    pub freeze_authority: Option<String>,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
 | 
			
		||||
#[serde(rename_all = "camelCase")]
 | 
			
		||||
pub struct UiMultisig {
 | 
			
		||||
    pub num_required_signers: u8,
 | 
			
		||||
    pub num_valid_signers: u8,
 | 
			
		||||
    pub is_initialized: bool,
 | 
			
		||||
    pub signers: Vec<String>,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn get_token_account_mint(data: &[u8]) -> Option<Pubkey> {
 | 
			
		||||
    if data.len() == Account::get_packed_len() {
 | 
			
		||||
        Some(Pubkey::new(&data[0..32]))
 | 
			
		||||
    } else {
 | 
			
		||||
        None
 | 
			
		||||
    }
 | 
			
		||||
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_parse_token() {
        let mint_pubkey = SplTokenPubkey::new(&[2; 32]);
        let owner_pubkey = SplTokenPubkey::new(&[3; 32]);
        let mut account_data = vec![0; Account::get_packed_len()];
        let mut account = Account::unpack_unchecked(&account_data).unwrap();
        account.mint = mint_pubkey;
        account.owner = owner_pubkey;
        account.amount = 42;
        account.state = AccountState::Initialized;
        account.is_native = COption::None;
        account.close_authority = COption::Some(owner_pubkey);
        Account::pack(account, &mut account_data).unwrap();

        assert!(parse_token(&account_data, None).is_err());
        assert_eq!(
            parse_token(&account_data, Some(2)).unwrap(),
            TokenAccountType::Account(UiTokenAccount {
                mint: mint_pubkey.to_string(),
                owner: owner_pubkey.to_string(),
                token_amount: UiTokenAmount {
                    ui_amount: 0.42,
                    decimals: 2,
                    amount: "42".to_string()
                },
                delegate: None,
                state: UiAccountState::Initialized,
                is_native: false,
                rent_exempt_reserve: None,
                delegated_amount: None,
                close_authority: Some(owner_pubkey.to_string()),
            }),
        );

        let mut mint_data = vec![0; Mint::get_packed_len()];
        let mut mint = Mint::unpack_unchecked(&mint_data).unwrap();
        mint.mint_authority = COption::Some(owner_pubkey);
        mint.supply = 42;
        mint.decimals = 3;
        mint.is_initialized = true;
        mint.freeze_authority = COption::Some(owner_pubkey);
        Mint::pack(mint, &mut mint_data).unwrap();

        assert_eq!(
            parse_token(&mint_data, None).unwrap(),
            TokenAccountType::Mint(UiMint {
                mint_authority: Some(owner_pubkey.to_string()),
                supply: 42.to_string(),
                decimals: 3,
                is_initialized: true,
                freeze_authority: Some(owner_pubkey.to_string()),
            }),
        );

        let signer1 = SplTokenPubkey::new(&[1; 32]);
        let signer2 = SplTokenPubkey::new(&[2; 32]);
        let signer3 = SplTokenPubkey::new(&[3; 32]);
        let mut multisig_data = vec![0; Multisig::get_packed_len()];
        let mut signers = [SplTokenPubkey::default(); 11];
        signers[0] = signer1;
        signers[1] = signer2;
        signers[2] = signer3;
        let mut multisig = Multisig::unpack_unchecked(&multisig_data).unwrap();
        multisig.m = 2;
        multisig.n = 3;
        multisig.is_initialized = true;
        multisig.signers = signers;
        Multisig::pack(multisig, &mut multisig_data).unwrap();

        assert_eq!(
            parse_token(&multisig_data, None).unwrap(),
            TokenAccountType::Multisig(UiMultisig {
                num_required_signers: 2,
                num_valid_signers: 3,
                is_initialized: true,
                signers: vec![
                    signer1.to_string(),
                    signer2.to_string(),
                    signer3.to_string()
                ],
            }),
        );

        let bad_data = vec![0; 4];
        assert!(parse_token(&bad_data, None).is_err());
    }

    #[test]
    fn test_get_token_account_mint() {
        let mint_pubkey = SplTokenPubkey::new(&[2; 32]);
        let mut account_data = vec![0; Account::get_packed_len()];
        let mut account = Account::unpack_unchecked(&account_data).unwrap();
        account.mint = mint_pubkey;
        Account::pack(account, &mut account_data).unwrap();

        let expected_mint_pubkey = Pubkey::new(&[2; 32]);
        assert_eq!(
            get_token_account_mint(&account_data),
            Some(expected_mint_pubkey)
        );
    }

    #[test]
    fn test_ui_token_amount_real_string() {
        let token_amount = token_amount_to_ui_amount(1, 0);
        assert_eq!(&token_amount.real_number_string(), "1");
        assert_eq!(&token_amount.real_number_string_trimmed(), "1");
        let token_amount = token_amount_to_ui_amount(1, 9);
        assert_eq!(&token_amount.real_number_string(), "0.000000001");
        assert_eq!(&token_amount.real_number_string_trimmed(), "0.000000001");
        let token_amount = token_amount_to_ui_amount(1_000_000_000, 9);
        assert_eq!(&token_amount.real_number_string(), "1.000000000");
        assert_eq!(&token_amount.real_number_string_trimmed(), "1");
        let token_amount = token_amount_to_ui_amount(1_234_567_890, 3);
        assert_eq!(&token_amount.real_number_string(), "1234567.890");
        assert_eq!(&token_amount.real_number_string_trimmed(), "1234567.89");
    }
}
@@ -1,144 +0,0 @@
use crate::{parse_account_data::ParseAccountError, StringAmount};
use solana_sdk::{
    clock::{Epoch, Slot},
    pubkey::Pubkey,
};
use solana_vote_program::vote_state::{BlockTimestamp, Lockout, VoteState};

pub fn parse_vote(data: &[u8]) -> Result<VoteAccountType, ParseAccountError> {
    let mut vote_state = VoteState::deserialize(data).map_err(ParseAccountError::from)?;
    let epoch_credits = vote_state
        .epoch_credits()
        .iter()
        .map(|(epoch, credits, previous_credits)| UiEpochCredits {
            epoch: *epoch,
            credits: credits.to_string(),
            previous_credits: previous_credits.to_string(),
        })
        .collect();
    let votes = vote_state
        .votes
        .iter()
        .map(|lockout| UiLockout {
            slot: lockout.slot,
            confirmation_count: lockout.confirmation_count,
        })
        .collect();
    let authorized_voters = vote_state
        .authorized_voters()
        .iter()
        .map(|(epoch, authorized_voter)| UiAuthorizedVoters {
            epoch: *epoch,
            authorized_voter: authorized_voter.to_string(),
        })
        .collect();
    let prior_voters = vote_state
        .prior_voters()
        .buf()
        .iter()
        .filter(|(pubkey, _, _)| pubkey != &Pubkey::default())
        .map(
            |(authorized_pubkey, epoch_of_last_authorized_switch, target_epoch)| UiPriorVoters {
                authorized_pubkey: authorized_pubkey.to_string(),
                epoch_of_last_authorized_switch: *epoch_of_last_authorized_switch,
                target_epoch: *target_epoch,
            },
        )
        .collect();
    Ok(VoteAccountType::Vote(UiVoteState {
        node_pubkey: vote_state.node_pubkey.to_string(),
        authorized_withdrawer: vote_state.authorized_withdrawer.to_string(),
        commission: vote_state.commission,
        votes,
        root_slot: vote_state.root_slot,
        authorized_voters,
        prior_voters,
        epoch_credits,
        last_timestamp: vote_state.last_timestamp,
    }))
}

/// A wrapper enum for consistency across programs
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase", tag = "type", content = "info")]
pub enum VoteAccountType {
    Vote(UiVoteState),
}

/// A duplicate representation of VoteState for pretty JSON serialization
#[derive(Debug, Serialize, Deserialize, Default, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct UiVoteState {
    node_pubkey: String,
    authorized_withdrawer: String,
    commission: u8,
    votes: Vec<UiLockout>,
    root_slot: Option<Slot>,
    authorized_voters: Vec<UiAuthorizedVoters>,
    prior_voters: Vec<UiPriorVoters>,
    epoch_credits: Vec<UiEpochCredits>,
    last_timestamp: BlockTimestamp,
}
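
// Illustrative sketch added for this write-up (not part of the original diff):
// the camelCase rename above is what gives the "pretty JSON" the doc comment
// mentions, e.g. `node_pubkey` serializes as "nodePubkey". This assumes
// serde_json is available to the crate, as it is elsewhere in account parsing;
// the module name is hypothetical.
#[cfg(test)]
mod ui_vote_state_json_sketch {
    use super::*;

    #[test]
    fn serializes_with_camel_case_keys() {
        let json = serde_json::to_string(&UiVoteState::default()).unwrap();
        assert!(json.contains("\"nodePubkey\""));
        assert!(json.contains("\"lastTimestamp\""));
    }
}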

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct UiLockout {
    slot: Slot,
    confirmation_count: u32,
}

impl From<&Lockout> for UiLockout {
    fn from(lockout: &Lockout) -> Self {
        Self {
            slot: lockout.slot,
            confirmation_count: lockout.confirmation_count,
        }
    }
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct UiAuthorizedVoters {
    epoch: Epoch,
    authorized_voter: String,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct UiPriorVoters {
    authorized_pubkey: String,
    epoch_of_last_authorized_switch: Epoch,
    target_epoch: Epoch,
}

#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct UiEpochCredits {
    epoch: Epoch,
    credits: StringAmount,
    previous_credits: StringAmount,
}

#[cfg(test)]
mod test {
    use super::*;
    use solana_vote_program::vote_state::VoteStateVersions;

    #[test]
    fn test_parse_vote() {
        let vote_state = VoteState::default();
        let mut vote_account_data: Vec<u8> = vec![0; VoteState::size_of()];
        let versioned = VoteStateVersions::Current(Box::new(vote_state));
        VoteState::serialize(&versioned, &mut vote_account_data).unwrap();
        let mut expected_vote_state = UiVoteState::default();
        expected_vote_state.node_pubkey = Pubkey::default().to_string();
        expected_vote_state.authorized_withdrawer = Pubkey::default().to_string();
        assert_eq!(
            parse_vote(&vote_account_data).unwrap(),
            VoteAccountType::Vote(expected_vote_state)
        );

        let bad_data = vec![0; 4];
        assert!(parse_vote(&bad_data).is_err());
    }
}
@@ -1,18 +0,0 @@
use solana_config_program::ConfigState;

pub const MAX_SHORT_FIELD_LENGTH: usize = 70;
pub const MAX_LONG_FIELD_LENGTH: usize = 300;
pub const MAX_VALIDATOR_INFO: u64 = 576;

solana_sdk::declare_id!("Va1idator1nfo111111111111111111111111111111");

#[derive(Debug, Deserialize, PartialEq, Serialize, Default)]
pub struct ValidatorInfo {
    pub info: String,
}

impl ConfigState for ValidatorInfo {
    fn max_space() -> u64 {
        MAX_VALIDATOR_INFO
    }
}
@@ -1,21 +1,19 @@
[package]
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
authors = ["Solana Maintainers <maintainers@solana.com>"]
edition = "2018"
name = "solana-accounts-bench"
version = "1.4.18"
version = "1.2.0"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
publish = false

[dependencies]
log = "0.4.6"
rayon = "1.4.0"
solana-logger = { path = "../logger", version = "1.4.18" }
solana-runtime = { path = "../runtime", version = "1.4.18" }
solana-measure = { path = "../measure", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
solana-version = { path = "../version", version = "1.4.18" }
rayon = "1.3.0"
solana-logger = { path = "../logger", version = "1.2.0" }
solana-runtime = { path = "../runtime", version = "1.2.0" }
solana-measure = { path = "../measure", version = "1.2.0" }
solana-sdk = { path = "../sdk", version = "1.2.0" }
rand = "0.7.0"
clap = "2.33.1"
crossbeam-channel = "0.4"

@@ -1,21 +1,20 @@
use clap::{crate_description, crate_name, value_t, App, Arg};
use clap::{value_t, App, Arg};
use rayon::prelude::*;
use solana_measure::measure::Measure;
use solana_runtime::{
    accounts::{create_test_accounts, update_accounts, Accounts},
    accounts_index::Ancestors,
};
use solana_sdk::{genesis_config::ClusterType, pubkey::Pubkey};
use std::env;
use solana_sdk::pubkey::Pubkey;
use std::fs;
use std::path::PathBuf;

fn main() {
    solana_logger::setup();

    let matches = App::new(crate_name!())
        .about(crate_description!())
        .version(solana_version::version!())
    let matches = App::new("crate")
        .about("about")
        .version("version")
        .arg(
            Arg::with_name("num_slots")
                .long("num_slots")
@@ -51,12 +50,11 @@ fn main() {
    let clean = matches.is_present("clean");
    println!("clean: {:?}", clean);

    let path = PathBuf::from(env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_owned()))
        .join("accounts-bench");
    let path = PathBuf::from("farf/accounts-bench");
    if fs::remove_dir_all(path.clone()).is_err() {
        println!("Warning: Couldn't remove {:?}", path);
    }
    let accounts = Accounts::new(vec![path], &ClusterType::Testnet);
    let accounts = Accounts::new(vec![path]);
    println!("Creating {} accounts", num_accounts);
    let mut create_time = Measure::start("create accounts");
    let pubkeys: Vec<_> = (0..num_slots)
@@ -88,7 +86,7 @@ fn main() {
    for x in 0..iterations {
        if clean {
            let mut time = Measure::start("clean");
            accounts.accounts_db.clean_accounts(None);
            accounts.accounts_db.clean_accounts();
            time.stop();
            println!("{}", time);
            for slot in 0..num_slots {
@@ -98,10 +96,7 @@ fn main() {
        } else {
            let mut pubkeys: Vec<Pubkey> = vec![];
            let mut time = Measure::start("hash");
            let hash = accounts
                .accounts_db
                .update_accounts_hash(0, &ancestors, true)
                .0;
            let hash = accounts.accounts_db.update_accounts_hash(0, &ancestors);
            time.stop();
            println!("hash: {} {}", hash, time);
            create_test_accounts(&accounts, &mut pubkeys, 1, 0);

@@ -1,29 +1,28 @@
[package]
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
authors = ["Solana Maintainers <maintainers@solana.com>"]
edition = "2018"
name = "solana-banking-bench"
version = "1.4.18"
version = "1.2.0"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
publish = false

[dependencies]
clap = "2.33.1"
crossbeam-channel = "0.4"
log = "0.4.6"
rand = "0.7.0"
rayon = "1.4.0"
solana-core = { path = "../core", version = "1.4.18" }
solana-clap-utils = { path = "../clap-utils", version = "1.4.18" }
solana-streamer = { path = "../streamer", version = "1.4.18" }
solana-perf = { path = "../perf", version = "1.4.18" }
solana-ledger = { path = "../ledger", version = "1.4.18" }
solana-logger = { path = "../logger", version = "1.4.18" }
solana-runtime = { path = "../runtime", version = "1.4.18" }
solana-measure = { path = "../measure", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
solana-version = { path = "../version", version = "1.4.18" }
rayon = "1.3.0"
solana-core = { path = "../core", version = "1.2.0" }
solana-clap-utils = { path = "../clap-utils", version = "1.2.0" }
solana-streamer = { path = "../streamer", version = "1.2.0" }
solana-perf = { path = "../perf", version = "1.2.0" }
solana-ledger = { path = "../ledger", version = "1.2.0" }
solana-logger = { path = "../logger", version = "1.2.0" }
solana-runtime = { path = "../runtime", version = "1.2.0" }
solana-measure = { path = "../measure", version = "1.2.0" }
solana-sdk = { path = "../sdk", version = "1.2.0" }
solana-version = { path = "../version", version = "1.2.0" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]

@@ -11,15 +11,17 @@ use solana_core::{
    poh_recorder::WorkingBankEntry,
};
use solana_ledger::{
    bank_forks::BankForks,
    blockstore::Blockstore,
    genesis_utils::{create_genesis_config, GenesisConfigInfo},
    get_tmp_ledger_path,
};
use solana_measure::measure::Measure;
use solana_perf::packet::to_packets_chunked;
use solana_runtime::{bank::Bank, bank_forks::BankForks};
use solana_runtime::bank::Bank;
use solana_sdk::{
    hash::Hash,
    pubkey::Pubkey,
    signature::Keypair,
    signature::Signature,
    system_transaction,
@@ -68,7 +70,7 @@ fn make_accounts_txs(
    hash: Hash,
    same_payer: bool,
) -> Vec<Transaction> {
    let to_pubkey = solana_sdk::pubkey::new_rand();
    let to_pubkey = Pubkey::new_rand();
    let payer_key = Keypair::new();
    let dummy = system_transaction::transfer(&payer_key, &to_pubkey, 1, hash);
    (0..total_num_transactions)
@@ -77,9 +79,9 @@ fn make_accounts_txs(
            let mut new = dummy.clone();
            let sig: Vec<u8> = (0..64).map(|_| thread_rng().gen()).collect();
            if !same_payer {
                new.message.account_keys[0] = solana_sdk::pubkey::new_rand();
                new.message.account_keys[0] = Pubkey::new_rand();
            }
            new.message.account_keys[1] = solana_sdk::pubkey::new_rand();
            new.message.account_keys[1] = Pubkey::new_rand();
            new.signatures = vec![Signature::new(&sig[0..64])];
            new
        })
@@ -166,9 +168,8 @@ fn main() {

    let (verified_sender, verified_receiver) = unbounded();
    let (vote_sender, vote_receiver) = unbounded();
    let (replay_vote_sender, _replay_vote_receiver) = unbounded();
    let bank0 = Bank::new(&genesis_config);
    let mut bank_forks = BankForks::new(bank0);
    let mut bank_forks = BankForks::new(0, bank0);
    let mut bank = bank_forks.working_bank();

    info!("threads: {} txs: {}", num_threads, total_num_transactions);
@@ -208,7 +209,7 @@ fn main() {
        bank.clear_signatures();
    }

    let mut verified: Vec<_> = to_packets_chunked(&transactions, packets_per_chunk);
    let mut verified: Vec<_> = to_packets_chunked(&transactions.clone(), packets_per_chunk);
    let ledger_path = get_tmp_ledger_path!();
    {
        let blockstore = Arc::new(
@@ -224,7 +225,6 @@ fn main() {
            verified_receiver,
            vote_receiver,
            None,
            replay_vote_sender,
        );
        poh_recorder.lock().unwrap().set_bank(&bank);

@@ -240,7 +240,7 @@ fn main() {
        let base_tx_count = bank.transaction_count();
        let mut txs_processed = 0;
        let mut root = 1;
        let collector = solana_sdk::pubkey::new_rand();
        let collector = Pubkey::new_rand();
        let config = Config {
            packets_per_batch: packets_per_chunk,
            chunk_len,

@@ -1,31 +0,0 @@
[package]
name = "solana-banks-client"
version = "1.4.18"
description = "Solana banks client"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
edition = "2018"

[dependencies]
async-trait = "0.1.36"
bincode = "1.3.1"
futures = "0.3"
mio = "0.7.6"
solana-banks-interface = { path = "../banks-interface", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
tarpc = { version = "0.23.0", features = ["full"] }
tokio = { version = "0.3", features = ["full"] }
tokio-serde = { version = "0.6", features = ["bincode"] }

[dev-dependencies]
solana-runtime = { path = "../runtime", version = "1.4.18" }
solana-banks-server = { path = "../banks-server", version = "1.4.18" }

[lib]
crate-type = ["lib"]
name = "solana_banks_client"

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -1,318 +0,0 @@
//! A client for the ledger state, from the perspective of an arbitrary validator.
//!
//! Use start_tcp_client() to create a client and then import BanksClientExt to
//! access its methods. Additional "*_with_context" methods are also available,
//! but they are undocumented, may change over time, and are generally more
//! cumbersome to use.

use async_trait::async_trait;
use futures::future::join_all;
pub use solana_banks_interface::{BanksClient, TransactionStatus};
use solana_banks_interface::{BanksRequest, BanksResponse};
use solana_sdk::{
    account::{from_account, Account},
    clock::Slot,
    commitment_config::CommitmentLevel,
    fee_calculator::FeeCalculator,
    hash::Hash,
    pubkey::Pubkey,
    rent::Rent,
    signature::Signature,
    sysvar,
    transaction::Transaction,
    transport,
};
use std::io::{self, Error, ErrorKind};
use tarpc::{
    client, context,
    rpc::{transport::channel::UnboundedChannel, ClientMessage, Response},
    serde_transport::tcp,
};
use tokio::{net::ToSocketAddrs, time::Duration};
use tokio_serde::formats::Bincode;

#[async_trait]
pub trait BanksClientExt {
    /// Send a transaction and return immediately. The server will resend the
    /// transaction until either it is accepted by the cluster or the transaction's
    /// blockhash expires.
    async fn send_transaction(&mut self, transaction: Transaction) -> io::Result<()>;

    /// Return a recent, rooted blockhash from the server. The cluster will only accept
    /// transactions with a blockhash that has not yet expired. Use the `get_fees`
    /// method to get both a blockhash and the blockhash's last valid slot.
    async fn get_recent_blockhash(&mut self) -> io::Result<Hash>;

    /// Return the fee parameters associated with a recent, rooted blockhash. The cluster
    /// will use the transaction's blockhash to look up these same fee parameters and
    /// use them to calculate the transaction fee.
    async fn get_fees(&mut self) -> io::Result<(FeeCalculator, Hash, Slot)>;

    /// Return the cluster rent
    async fn get_rent(&mut self) -> io::Result<Rent>;

    /// Send a transaction and return after the transaction has been rejected or
    /// reached the given level of commitment.
    async fn process_transaction_with_commitment(
        &mut self,
        transaction: Transaction,
        commitment: CommitmentLevel,
    ) -> transport::Result<()>;

    /// Send a transaction and return after the transaction has been finalized or rejected.
    async fn process_transaction(&mut self, transaction: Transaction) -> transport::Result<()>;

    /// Return the status of a transaction with a signature matching the transaction's first
    /// signature. Return None if the transaction is not found, which may be because the
    /// blockhash was expired or the fee-paying account had insufficient funds to pay the
    /// transaction fee. Note that servers rarely store the full transaction history. This
    /// method may return None if the transaction status has been discarded.
    async fn get_transaction_status(
        &mut self,
        signature: Signature,
    ) -> io::Result<Option<TransactionStatus>>;

    /// Same as get_transaction_status, but for multiple transactions.
    async fn get_transaction_statuses(
        &mut self,
        signatures: Vec<Signature>,
    ) -> io::Result<Vec<Option<TransactionStatus>>>;

    /// Return the most recent rooted slot height. All transactions at or below this height
    /// are said to be finalized. The cluster will not fork to a higher slot height.
    async fn get_root_slot(&mut self) -> io::Result<Slot>;

    /// Return the account at the given address at the slot corresponding to the given
    /// commitment level. If the account is not found, None is returned.
    async fn get_account_with_commitment(
        &mut self,
        address: Pubkey,
        commitment: CommitmentLevel,
    ) -> io::Result<Option<Account>>;

    /// Return the account at the given address at the time of the most recent root slot.
    /// If the account is not found, None is returned.
    async fn get_account(&mut self, address: Pubkey) -> io::Result<Option<Account>>;

    /// Return the balance in lamports of an account at the given address at the slot
    /// corresponding to the given commitment level.
    async fn get_balance_with_commitment(
        &mut self,
        address: Pubkey,
        commitment: CommitmentLevel,
    ) -> io::Result<u64>;

    /// Return the balance in lamports of an account at the given address at the time
    /// of the most recent root slot.
    async fn get_balance(&mut self, address: Pubkey) -> io::Result<u64>;
}

#[async_trait]
impl BanksClientExt for BanksClient {
    async fn send_transaction(&mut self, transaction: Transaction) -> io::Result<()> {
        self.send_transaction_with_context(context::current(), transaction)
            .await
    }

    async fn get_fees(&mut self) -> io::Result<(FeeCalculator, Hash, Slot)> {
        self.get_fees_with_commitment_and_context(context::current(), CommitmentLevel::Root)
            .await
    }

    async fn get_rent(&mut self) -> io::Result<Rent> {
        let rent_sysvar = self
            .get_account(sysvar::rent::id())
            .await?
            .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Rent sysvar not present"))?;

        from_account::<Rent>(&rent_sysvar).ok_or_else(|| {
            io::Error::new(io::ErrorKind::Other, "Failed to deserialize Rent sysvar")
        })
    }

    async fn get_recent_blockhash(&mut self) -> io::Result<Hash> {
        Ok(self.get_fees().await?.1)
    }

    async fn process_transaction_with_commitment(
        &mut self,
        transaction: Transaction,
        commitment: CommitmentLevel,
    ) -> transport::Result<()> {
        let mut ctx = context::current();
        ctx.deadline += Duration::from_secs(50);
        let result = self
            .process_transaction_with_commitment_and_context(ctx, transaction, commitment)
            .await?;
        match result {
            None => Err(Error::new(ErrorKind::TimedOut, "invalid blockhash or fee-payer").into()),
            Some(transaction_result) => Ok(transaction_result?),
        }
    }

    async fn process_transaction(&mut self, transaction: Transaction) -> transport::Result<()> {
        self.process_transaction_with_commitment(transaction, CommitmentLevel::default())
            .await
    }

    async fn get_root_slot(&mut self) -> io::Result<Slot> {
        self.get_slot_with_context(context::current(), CommitmentLevel::Root)
            .await
    }

    async fn get_account_with_commitment(
        &mut self,
        address: Pubkey,
        commitment: CommitmentLevel,
    ) -> io::Result<Option<Account>> {
        self.get_account_with_commitment_and_context(context::current(), address, commitment)
            .await
    }

    async fn get_account(&mut self, address: Pubkey) -> io::Result<Option<Account>> {
        self.get_account_with_commitment(address, CommitmentLevel::default())
            .await
    }

    async fn get_balance_with_commitment(
        &mut self,
        address: Pubkey,
        commitment: CommitmentLevel,
    ) -> io::Result<u64> {
        let account = self
            .get_account_with_commitment_and_context(context::current(), address, commitment)
            .await?;
        Ok(account.map(|x| x.lamports).unwrap_or(0))
    }

    async fn get_balance(&mut self, address: Pubkey) -> io::Result<u64> {
        self.get_balance_with_commitment(address, CommitmentLevel::default())
            .await
    }

    async fn get_transaction_status(
        &mut self,
        signature: Signature,
    ) -> io::Result<Option<TransactionStatus>> {
        self.get_transaction_status_with_context(context::current(), signature)
            .await
    }

    async fn get_transaction_statuses(
        &mut self,
        signatures: Vec<Signature>,
    ) -> io::Result<Vec<Option<TransactionStatus>>> {
        // tarpc futures oddly hold a mutable reference back to the client so clone the client upfront
        let mut clients_and_signatures: Vec<_> = signatures
            .into_iter()
            .map(|signature| (self.clone(), signature))
            .collect();

        let futs = clients_and_signatures
            .iter_mut()
            .map(|(client, signature)| client.get_transaction_status(*signature));

        let statuses = join_all(futs).await;

        // Convert Vec<Result<_, _>> to Result<Vec<_>>
        statuses.into_iter().collect()
    }
}

pub async fn start_client(
    transport: UnboundedChannel<Response<BanksResponse>, ClientMessage<BanksRequest>>,
) -> io::Result<BanksClient> {
    BanksClient::new(client::Config::default(), transport).spawn()
}

pub async fn start_tcp_client<T: ToSocketAddrs>(addr: T) -> io::Result<BanksClient> {
    let transport = tcp::connect(addr, Bincode::default).await?;
    BanksClient::new(client::Config::default(), transport).spawn()
}
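
// Illustrative usage sketch added for this write-up (not part of the original
// file): the pattern the module docs describe, using `start_tcp_client` and the
// `BanksClientExt` methods. The socket address and queried account address are
// hypothetical placeholders.
#[allow(dead_code)]
async fn example_balance_check() -> io::Result<u64> {
    // Connect to a hypothetical validator's banks RPC port.
    let mut banks_client = start_tcp_client("127.0.0.1:8901").await?;
    // Any account address works here; the default pubkey is just a placeholder.
    banks_client.get_balance(Pubkey::default()).await
}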

#[cfg(test)]
mod tests {
    use super::*;
    use solana_banks_server::banks_server::start_local_server;
    use solana_runtime::{bank::Bank, bank_forks::BankForks, genesis_utils::create_genesis_config};
    use solana_sdk::{message::Message, signature::Signer, system_instruction};
    use std::sync::{Arc, RwLock};
    use tarpc::transport;
    use tokio::{runtime::Runtime, time::sleep};

    #[test]
    fn test_banks_client_new() {
        let (client_transport, _server_transport) = transport::channel::unbounded();
        BanksClient::new(client::Config::default(), client_transport);
    }

    #[test]
    fn test_banks_server_transfer_via_server() -> io::Result<()> {
        // This test shows the preferred way to interact with BanksServer.
        // It creates a runtime explicitly (no globals via tokio macros) and calls
        // `runtime.block_on()` just once, to run all the async code.

        let genesis = create_genesis_config(10);
        let bank_forks = Arc::new(RwLock::new(BankForks::new(Bank::new(
            &genesis.genesis_config,
        ))));

        let bob_pubkey = solana_sdk::pubkey::new_rand();
        let mint_pubkey = genesis.mint_keypair.pubkey();
        let instruction = system_instruction::transfer(&mint_pubkey, &bob_pubkey, 1);
        let message = Message::new(&[instruction], Some(&mint_pubkey));

        Runtime::new()?.block_on(async {
            let client_transport = start_local_server(&bank_forks).await;
            let mut banks_client =
                BanksClient::new(client::Config::default(), client_transport).spawn()?;

            let recent_blockhash = banks_client.get_recent_blockhash().await?;
            let transaction = Transaction::new(&[&genesis.mint_keypair], message, recent_blockhash);
            banks_client.process_transaction(transaction).await.unwrap();
            assert_eq!(banks_client.get_balance(bob_pubkey).await?, 1);
            Ok(())
        })
    }

    #[test]
    fn test_banks_server_transfer_via_client() -> io::Result<()> {
        // The caller may not want to hold the connection open until the transaction
        // is processed (or blockhash expires). In this test, we verify the
        // server-side functionality is available to the client.

        let genesis = create_genesis_config(10);
        let bank_forks = Arc::new(RwLock::new(BankForks::new(Bank::new(
            &genesis.genesis_config,
        ))));

        let mint_pubkey = &genesis.mint_keypair.pubkey();
        let bob_pubkey = solana_sdk::pubkey::new_rand();
        let instruction = system_instruction::transfer(&mint_pubkey, &bob_pubkey, 1);
        let message = Message::new(&[instruction], Some(&mint_pubkey));

        Runtime::new()?.block_on(async {
            let client_transport = start_local_server(&bank_forks).await;
            let mut banks_client =
                BanksClient::new(client::Config::default(), client_transport).spawn()?;
            let (_, recent_blockhash, last_valid_slot) = banks_client.get_fees().await?;
            let transaction = Transaction::new(&[&genesis.mint_keypair], message, recent_blockhash);
            let signature = transaction.signatures[0];
            banks_client.send_transaction(transaction).await?;

            let mut status = banks_client.get_transaction_status(signature).await?;

            while status.is_none() {
                let root_slot = banks_client.get_root_slot().await?;
                if root_slot > last_valid_slot {
                    break;
                }
                sleep(Duration::from_millis(100)).await;
                status = banks_client.get_transaction_status(signature).await?;
            }
            assert!(status.unwrap().err.is_none());
            assert_eq!(banks_client.get_balance(bob_pubkey).await?, 1);
            Ok(())
        })
    }
}
@@ -1,22 +0,0 @@
[package]
name = "solana-banks-interface"
version = "1.4.18"
description = "Solana banks RPC interface"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
edition = "2018"

[dependencies]
mio = "0.7.6"
serde = { version = "1.0.112", features = ["derive"] }
solana-sdk = { path = "../sdk", version = "1.4.18" }
tarpc = { version = "0.23.0", features = ["full"] }

[lib]
crate-type = ["lib"]
name = "solana_banks_interface"

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -1,49 +0,0 @@
use serde::{Deserialize, Serialize};
use solana_sdk::{
    account::Account,
    clock::Slot,
    commitment_config::CommitmentLevel,
    fee_calculator::FeeCalculator,
    hash::Hash,
    pubkey::Pubkey,
    signature::Signature,
    transaction::{self, Transaction, TransactionError},
};

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TransactionStatus {
    pub slot: Slot,
    pub confirmations: Option<usize>, // None = rooted
    pub err: Option<TransactionError>,
}

#[tarpc::service]
pub trait Banks {
    async fn send_transaction_with_context(transaction: Transaction);
    async fn get_fees_with_commitment_and_context(
        commitment: CommitmentLevel,
    ) -> (FeeCalculator, Hash, Slot);
    async fn get_transaction_status_with_context(signature: Signature)
        -> Option<TransactionStatus>;
    async fn get_slot_with_context(commitment: CommitmentLevel) -> Slot;
    async fn process_transaction_with_commitment_and_context(
        transaction: Transaction,
        commitment: CommitmentLevel,
    ) -> Option<transaction::Result<()>>;
    async fn get_account_with_commitment_and_context(
        address: Pubkey,
        commitment: CommitmentLevel,
    ) -> Option<Account>;
}

#[cfg(test)]
mod tests {
    use super::*;
    use tarpc::{client, transport};

    #[test]
    fn test_banks_client_new() {
        let (client_transport, _server_transport) = transport::channel::unbounded();
        BanksClient::new(client::Config::default(), client_transport);
    }
}
@@ -1,29 +0,0 @@
[package]
name = "solana-banks-server"
version = "1.4.18"
description = "Solana banks server"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
edition = "2018"

[dependencies]
bincode = "1.3.1"
futures = "0.3"
log = "0.4.8"
mio = "0.7.6"
solana-banks-interface = { path = "../banks-interface", version = "1.4.18" }
solana-runtime = { path = "../runtime", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
solana-metrics = { path = "../metrics", version = "1.4.18" }
tarpc = { version = "0.23.0", features = ["full"] }
tokio = { version = "0.3", features = ["full"] }
tokio-serde = { version = "0.6", features = ["bincode"] }

[lib]
crate-type = ["lib"]
name = "solana_banks_server"

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -1,275 +0,0 @@
 | 
			
		||||
use crate::send_transaction_service::{SendTransactionService, TransactionInfo};
 | 
			
		||||
use bincode::{deserialize, serialize};
 | 
			
		||||
use futures::{
 | 
			
		||||
    future,
 | 
			
		||||
    prelude::stream::{self, StreamExt},
 | 
			
		||||
};
 | 
			
		||||
use solana_banks_interface::{Banks, BanksRequest, BanksResponse, TransactionStatus};
 | 
			
		||||
use solana_runtime::{bank::Bank, bank_forks::BankForks, commitment::BlockCommitmentCache};
 | 
			
		||||
use solana_sdk::{
 | 
			
		||||
    account::Account,
 | 
			
		||||
    clock::Slot,
 | 
			
		||||
    commitment_config::CommitmentLevel,
 | 
			
		||||
    fee_calculator::FeeCalculator,
 | 
			
		||||
    hash::Hash,
 | 
			
		||||
    pubkey::Pubkey,
 | 
			
		||||
    signature::Signature,
 | 
			
		||||
    transaction::{self, Transaction},
 | 
			
		||||
};
 | 
			
		||||
use std::{
 | 
			
		||||
    io,
 | 
			
		||||
    net::{Ipv4Addr, SocketAddr},
 | 
			
		||||
    sync::{
 | 
			
		||||
        mpsc::{channel, Receiver, Sender},
 | 
			
		||||
        Arc, RwLock,
 | 
			
		||||
    },
 | 
			
		||||
    thread::Builder,
 | 
			
		||||
    time::Duration,
 | 
			
		||||
};
 | 
			
		||||
use tarpc::{
 | 
			
		||||
    context::Context,
 | 
			
		||||
    rpc::{transport::channel::UnboundedChannel, ClientMessage, Response},
 | 
			
		||||
    serde_transport::tcp,
 | 
			
		||||
    server::{self, Channel, Handler},
 | 
			
		||||
    transport,
 | 
			
		||||
};
 | 
			
		||||
use tokio::time::sleep;
 | 
			
		||||
use tokio_serde::formats::Bincode;
 | 
			
		||||
 | 
			
		||||
#[derive(Clone)]
 | 
			
		||||
struct BanksServer {
 | 
			
		||||
    bank_forks: Arc<RwLock<BankForks>>,
 | 
			
		||||
    block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
 | 
			
		||||
    transaction_sender: Sender<TransactionInfo>,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl BanksServer {
 | 
			
		||||
    /// Return a BanksServer that forwards transactions to the
 | 
			
		||||
    /// given sender. If unit-testing, those transactions can go to
 | 
			
		||||
    /// a bank in the given BankForks. Otherwise, the receiver should
 | 
			
		||||
    /// forward them to a validator in the leader schedule.
 | 
			
		||||
    fn new(
 | 
			
		||||
        bank_forks: Arc<RwLock<BankForks>>,
 | 
			
		||||
        block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
 | 
			
		||||
        transaction_sender: Sender<TransactionInfo>,
 | 
			
		||||
    ) -> Self {
 | 
			
		||||
        Self {
 | 
			
		||||
            bank_forks,
 | 
			
		||||
            block_commitment_cache,
 | 
			
		||||
            transaction_sender,
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    fn run(bank: &Bank, transaction_receiver: Receiver<TransactionInfo>) {
 | 
			
		||||
        while let Ok(info) = transaction_receiver.recv() {
 | 
			
		||||
            let mut transaction_infos = vec![info];
 | 
			
		||||
            while let Ok(info) = transaction_receiver.try_recv() {
 | 
			
		||||
                transaction_infos.push(info);
 | 
			
		||||
            }
 | 
			
		||||
            let transactions: Vec<_> = transaction_infos
 | 
			
		||||
                .into_iter()
 | 
			
		||||
                .map(|info| deserialize(&info.wire_transaction).unwrap())
 | 
			
		||||
                .collect();
 | 
			
		||||
            let _ = bank.process_transactions(&transactions);
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    /// Useful for unit-testing
 | 
			
		||||
    fn new_loopback(bank_forks: Arc<RwLock<BankForks>>) -> Self {
 | 
			
		||||
        let (transaction_sender, transaction_receiver) = channel();
 | 
			
		||||
        let bank = bank_forks.read().unwrap().working_bank();
 | 
			
		||||
        let slot = bank.slot();
 | 
			
		||||
        let block_commitment_cache = Arc::new(RwLock::new(
 | 
			
		||||
            BlockCommitmentCache::new_for_tests_with_slots(slot, slot),
 | 
			
		||||
        ));
 | 
			
		||||
        Builder::new()
 | 
			
		||||
            .name("solana-bank-forks-client".to_string())
 | 
			
		||||
            .spawn(move || Self::run(&bank, transaction_receiver))
 | 
			
		||||
            .unwrap();
 | 
			
		||||
        Self::new(bank_forks, block_commitment_cache, transaction_sender)
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    fn slot(&self, commitment: CommitmentLevel) -> Slot {
 | 
			
		||||
        self.block_commitment_cache
 | 
			
		||||
            .read()
 | 
			
		||||
            .unwrap()
 | 
			
		||||
            .slot_with_commitment(commitment)
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    fn bank(&self, commitment: CommitmentLevel) -> Arc<Bank> {
 | 
			
		||||
        self.bank_forks.read().unwrap()[self.slot(commitment)].clone()
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    async fn poll_signature_status(
 | 
			
		||||
        self,
 | 
			
		||||
        signature: &Signature,
 | 
			
		||||
        blockhash: &Hash,
 | 
			
		||||
        last_valid_slot: Slot,
 | 
			
		||||
        commitment: CommitmentLevel,
 | 
			
		||||
    ) -> Option<transaction::Result<()>> {
 | 
			
		||||
        let mut status = self
 | 
			
		||||
            .bank(commitment)
 | 
			
		||||
            .get_signature_status_with_blockhash(signature, blockhash);
 | 
			
		||||
        while status.is_none() {
 | 
			
            sleep(Duration::from_millis(200)).await;
            let bank = self.bank(commitment);
            if bank.slot() > last_valid_slot {
                break;
            }
            status = bank.get_signature_status_with_blockhash(signature, blockhash);
        }
        status
    }
}

fn verify_transaction(transaction: &Transaction) -> transaction::Result<()> {
    if let Err(err) = transaction.verify() {
        Err(err)
    } else if let Err(err) = transaction.verify_precompiles() {
        Err(err)
    } else {
        Ok(())
    }
}

#[tarpc::server]
impl Banks for BanksServer {
    async fn send_transaction_with_context(self, _: Context, transaction: Transaction) {
        let blockhash = &transaction.message.recent_blockhash;
        let last_valid_slot = self
            .bank_forks
            .read()
            .unwrap()
            .root_bank()
            .get_blockhash_last_valid_slot(&blockhash)
            .unwrap();
        let signature = transaction.signatures.get(0).cloned().unwrap_or_default();
        let info =
            TransactionInfo::new(signature, serialize(&transaction).unwrap(), last_valid_slot);
        self.transaction_sender.send(info).unwrap();
    }

    async fn get_fees_with_commitment_and_context(
        self,
        _: Context,
        commitment: CommitmentLevel,
    ) -> (FeeCalculator, Hash, Slot) {
        let bank = self.bank(commitment);
        let (blockhash, fee_calculator) = bank.last_blockhash_with_fee_calculator();
        let last_valid_slot = bank.get_blockhash_last_valid_slot(&blockhash).unwrap();
        (fee_calculator, blockhash, last_valid_slot)
    }

    async fn get_transaction_status_with_context(
        self,
        _: Context,
        signature: Signature,
    ) -> Option<TransactionStatus> {
        let bank = self.bank(CommitmentLevel::Recent);
        let (slot, status) = bank.get_signature_status_slot(&signature)?;
        let r_block_commitment_cache = self.block_commitment_cache.read().unwrap();

        let confirmations = if r_block_commitment_cache.root() >= slot {
            None
        } else {
            r_block_commitment_cache
                .get_confirmation_count(slot)
                .or(Some(0))
        };
        Some(TransactionStatus {
            slot,
            confirmations,
            err: status.err(),
        })
    }

    async fn get_slot_with_context(self, _: Context, commitment: CommitmentLevel) -> Slot {
        self.slot(commitment)
    }

    async fn process_transaction_with_commitment_and_context(
        self,
        _: Context,
        transaction: Transaction,
        commitment: CommitmentLevel,
    ) -> Option<transaction::Result<()>> {
        if let Err(err) = verify_transaction(&transaction) {
            return Some(Err(err));
        }

        let blockhash = &transaction.message.recent_blockhash;
        let last_valid_slot = self
            .bank_forks
            .read()
            .unwrap()
            .root_bank()
            .get_blockhash_last_valid_slot(blockhash)
            .unwrap();
        let signature = transaction.signatures.get(0).cloned().unwrap_or_default();
        let info =
            TransactionInfo::new(signature, serialize(&transaction).unwrap(), last_valid_slot);
        self.transaction_sender.send(info).unwrap();
        self.poll_signature_status(&signature, blockhash, last_valid_slot, commitment)
            .await
    }

    async fn get_account_with_commitment_and_context(
        self,
        _: Context,
        address: Pubkey,
        commitment: CommitmentLevel,
    ) -> Option<Account> {
        let bank = self.bank(commitment);
        bank.get_account(&address)
    }
}

pub async fn start_local_server(
    bank_forks: &Arc<RwLock<BankForks>>,
) -> UnboundedChannel<Response<BanksResponse>, ClientMessage<BanksRequest>> {
    let banks_server = BanksServer::new_loopback(bank_forks.clone());
    let (client_transport, server_transport) = transport::channel::unbounded();
    let server = server::new(server::Config::default())
        .incoming(stream::once(future::ready(server_transport)))
        .respond_with(banks_server.serve());
    tokio::spawn(server);
    client_transport
}
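A minimal caller-side sketch (illustrative, not part of the change set above): how a test might obtain the loopback transport returned by start_local_server. Bank::default() and the enclosing tokio runtime are assumptions, and the tarpc-generated client constructor (version-dependent) is deliberately omitted.

use solana_runtime::{bank::Bank, bank_forks::BankForks};
use std::sync::{Arc, RwLock};

async fn demo_local_banks_server() {
    // Mirror the BankForks setup used by the tests later in this diff.
    let bank_forks = Arc::new(RwLock::new(BankForks::new(Bank::default())));
    // The returned value is the client half of the in-process channel; a tarpc
    // client built from the generated `Banks` trait could attach to it.
    let _client_transport = start_local_server(&bank_forks).await;
}
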
pub async fn start_tcp_server(
    listen_addr: SocketAddr,
    tpu_addr: SocketAddr,
    bank_forks: Arc<RwLock<BankForks>>,
    block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
) -> io::Result<()> {
    // Note: These settings are copied straight from the tarpc example.
    let server = tcp::listen(listen_addr, Bincode::default)
        .await?
        // Ignore accept errors.
        .filter_map(|r| future::ready(r.ok()))
        .map(server::BaseChannel::with_defaults)
        // Limit channels to 1 per IP.
        .max_channels_per_key(1, |t| {
            t.as_ref()
                .peer_addr()
                .map(|x| x.ip())
                .unwrap_or_else(|_| Ipv4Addr::new(0, 0, 0, 0).into())
        })
        // serve is generated by the service attribute. It takes as input any type implementing
        // the generated Banks trait.
        .map(move |chan| {
            let (sender, receiver) = channel();

            SendTransactionService::new(tpu_addr, &bank_forks, receiver);

            let server =
                BanksServer::new(bank_forks.clone(), block_commitment_cache.clone(), sender);
            chan.respond_with(server.serve()).execute()
        })
        // Max 10 channels.
        .buffer_unordered(10)
        .for_each(|_| async {});

    server.await;
    Ok(())
}
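Also illustrative (not in the diff): the TCP variant only needs shared BankForks and BlockCommitmentCache handles plus two socket addresses. The literal 127.0.0.1:0 addresses below are placeholders, mirroring the tests elsewhere in this diff.

use solana_runtime::{bank::Bank, bank_forks::BankForks, commitment::BlockCommitmentCache};
use std::{
    net::SocketAddr,
    sync::{Arc, RwLock},
};

async fn demo_tcp_banks_server() -> std::io::Result<()> {
    let listen_addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
    let tpu_addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
    let bank_forks = Arc::new(RwLock::new(BankForks::new(Bank::default())));
    let block_commitment_cache = Arc::new(RwLock::new(BlockCommitmentCache::default()));
    // Runs until the listener future completes; cooperative shutdown is the
    // caller's job (see start_abortable_tcp_server below).
    start_tcp_server(listen_addr, tpu_addr, bank_forks, block_commitment_cache).await
}
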
@@ -1,6 +0,0 @@
pub mod banks_server;
pub mod rpc_banks_service;
pub mod send_transaction_service;

#[macro_use]
extern crate solana_metrics;
@@ -1,116 +0,0 @@
//! The `rpc_banks_service` module implements the Solana Banks RPC API.

use crate::banks_server::start_tcp_server;
use futures::{future::FutureExt, pin_mut, prelude::stream::StreamExt, select};
use solana_runtime::{bank_forks::BankForks, commitment::BlockCommitmentCache};
use std::{
    net::SocketAddr,
    sync::{
        atomic::{AtomicBool, Ordering},
        Arc, RwLock,
    },
    thread::{self, Builder, JoinHandle},
};
use tokio::{
    runtime::Runtime,
    time::{self, Duration},
};

pub struct RpcBanksService {
    thread_hdl: JoinHandle<()>,
}

/// Run the TCP service until `exit` is set to true
async fn start_abortable_tcp_server(
    listen_addr: SocketAddr,
    tpu_addr: SocketAddr,
    bank_forks: Arc<RwLock<BankForks>>,
    block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
    exit: Arc<AtomicBool>,
) {
    let server = start_tcp_server(
        listen_addr,
        tpu_addr,
        bank_forks.clone(),
        block_commitment_cache.clone(),
    )
    .fuse();
    let interval = time::interval(Duration::from_millis(100)).fuse();
    pin_mut!(server, interval);
    loop {
        select! {
            _ = server => {},
            _ = interval.select_next_some() => {
                if exit.load(Ordering::Relaxed) {
                    break;
                }
            }
        }
    }
}
impl RpcBanksService {
    fn run(
        listen_addr: SocketAddr,
        tpu_addr: SocketAddr,
        bank_forks: Arc<RwLock<BankForks>>,
        block_commitment_cache: Arc<RwLock<BlockCommitmentCache>>,
        exit: Arc<AtomicBool>,
    ) {
        let server = start_abortable_tcp_server(
            listen_addr,
            tpu_addr,
            bank_forks,
            block_commitment_cache,
            exit,
        );
        Runtime::new().unwrap().block_on(server);
    }

    pub fn new(
        listen_addr: SocketAddr,
        tpu_addr: SocketAddr,
        bank_forks: &Arc<RwLock<BankForks>>,
        block_commitment_cache: &Arc<RwLock<BlockCommitmentCache>>,
        exit: &Arc<AtomicBool>,
    ) -> Self {
        let bank_forks = bank_forks.clone();
        let block_commitment_cache = block_commitment_cache.clone();
        let exit = exit.clone();
        let thread_hdl = Builder::new()
            .name("solana-rpc-banks".to_string())
            .spawn(move || {
                Self::run(
                    listen_addr,
                    tpu_addr,
                    bank_forks,
                    block_commitment_cache,
                    exit,
                )
            })
            .unwrap();

        Self { thread_hdl }
    }

    pub fn join(self) -> thread::Result<()> {
        self.thread_hdl.join()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use solana_runtime::bank::Bank;

    #[test]
    fn test_rpc_banks_server_exit() {
        let bank_forks = Arc::new(RwLock::new(BankForks::new(Bank::default())));
        let block_commitment_cache = Arc::new(RwLock::new(BlockCommitmentCache::default()));
        let exit = Arc::new(AtomicBool::new(false));
        let addr = "127.0.0.1:0".parse().unwrap();
        let service = RpcBanksService::new(addr, addr, &bank_forks, &block_commitment_cache, &exit);
        exit.store(true, Ordering::Relaxed);
        service.join().unwrap();
    }
}
@@ -1,343 +0,0 @@
// TODO: Merge this implementation with the one at `core/src/send_transaction_service.rs`
use log::*;
use solana_metrics::{datapoint_warn, inc_new_counter_info};
use solana_runtime::{bank::Bank, bank_forks::BankForks};
use solana_sdk::{clock::Slot, signature::Signature};
use std::{
    collections::HashMap,
    net::{SocketAddr, UdpSocket},
    sync::{
        mpsc::{Receiver, RecvTimeoutError},
        Arc, RwLock,
    },
    thread::{self, Builder, JoinHandle},
    time::{Duration, Instant},
};

/// Maximum size of the transaction queue
const MAX_TRANSACTION_QUEUE_SIZE: usize = 10_000; // This seems like a lot but maybe it needs to be bigger one day

pub struct SendTransactionService {
    thread: JoinHandle<()>,
}

pub struct TransactionInfo {
    pub signature: Signature,
    pub wire_transaction: Vec<u8>,
    pub last_valid_slot: Slot,
}

impl TransactionInfo {
    pub fn new(signature: Signature, wire_transaction: Vec<u8>, last_valid_slot: Slot) -> Self {
        Self {
            signature,
            wire_transaction,
            last_valid_slot,
        }
    }
}

#[derive(Default, Debug, PartialEq)]
struct ProcessTransactionsResult {
    rooted: u64,
    expired: u64,
    retried: u64,
    failed: u64,
    retained: u64,
}

impl SendTransactionService {
    pub fn new(
        tpu_address: SocketAddr,
        bank_forks: &Arc<RwLock<BankForks>>,
        receiver: Receiver<TransactionInfo>,
    ) -> Self {
        let thread = Self::retry_thread(receiver, bank_forks.clone(), tpu_address);
        Self { thread }
    }

    fn retry_thread(
        receiver: Receiver<TransactionInfo>,
        bank_forks: Arc<RwLock<BankForks>>,
        tpu_address: SocketAddr,
    ) -> JoinHandle<()> {
        let mut last_status_check = Instant::now();
        let mut transactions = HashMap::new();
        let send_socket = UdpSocket::bind("0.0.0.0:0").unwrap();

        Builder::new()
            .name("send-tx-svc".to_string())
            .spawn(move || loop {
                match receiver.recv_timeout(Duration::from_secs(1)) {
                    Err(RecvTimeoutError::Disconnected) => break,
                    Err(RecvTimeoutError::Timeout) => {}
                    Ok(transaction_info) => {
                        Self::send_transaction(
                            &send_socket,
                            &tpu_address,
                            &transaction_info.wire_transaction,
                        );
                        if transactions.len() < MAX_TRANSACTION_QUEUE_SIZE {
                            transactions.insert(transaction_info.signature, transaction_info);
                        } else {
                            datapoint_warn!("send_transaction_service-queue-overflow");
                        }
                    }
                }

                if Instant::now().duration_since(last_status_check).as_secs() >= 5 {
                    if !transactions.is_empty() {
                        datapoint_info!(
                            "send_transaction_service-queue-size",
                            ("len", transactions.len(), i64)
                        );
                        let bank_forks = bank_forks.read().unwrap();
                        let root_bank = bank_forks.root_bank();
                        let working_bank = bank_forks.working_bank();

                        let _result = Self::process_transactions(
                            &working_bank,
                            &root_bank,
                            &send_socket,
                            &tpu_address,
                            &mut transactions,
                        );
                    }
                    last_status_check = Instant::now();
                }
            })
            .unwrap()
    }

    fn process_transactions(
        working_bank: &Arc<Bank>,
        root_bank: &Arc<Bank>,
        send_socket: &UdpSocket,
        tpu_address: &SocketAddr,
        transactions: &mut HashMap<Signature, TransactionInfo>,
    ) -> ProcessTransactionsResult {
        let mut result = ProcessTransactionsResult::default();

        transactions.retain(|signature, transaction_info| {
            if root_bank.has_signature(signature) {
                info!("Transaction is rooted: {}", signature);
                result.rooted += 1;
                inc_new_counter_info!("send_transaction_service-rooted", 1);
                false
            } else if transaction_info.last_valid_slot < root_bank.slot() {
                info!("Dropping expired transaction: {}", signature);
                result.expired += 1;
                inc_new_counter_info!("send_transaction_service-expired", 1);
                false
            } else {
                match working_bank.get_signature_status_slot(signature) {
                    None => {
                        // Transaction is unknown to the working bank, it might have been
                        // dropped or landed in another fork.  Re-send it
                        info!("Retrying transaction: {}", signature);
                        result.retried += 1;
                        inc_new_counter_info!("send_transaction_service-retry", 1);
                        Self::send_transaction(
                            &send_socket,
                            &tpu_address,
                            &transaction_info.wire_transaction,
                        );
                        true
                    }
                    Some((_slot, status)) => {
                        if status.is_err() {
                            info!("Dropping failed transaction: {}", signature);
                            result.failed += 1;
                            inc_new_counter_info!("send_transaction_service-failed", 1);
                            false
                        } else {
                            result.retained += 1;
                            true
                        }
                    }
                }
            }
        });

        result
    }

    fn send_transaction(
        send_socket: &UdpSocket,
        tpu_address: &SocketAddr,
        wire_transaction: &[u8],
    ) {
        if let Err(err) = send_socket.send_to(wire_transaction, tpu_address) {
            warn!("Failed to send transaction to {}: {:?}", tpu_address, err);
        }
    }

    pub fn join(self) -> thread::Result<()> {
        self.thread.join()
    }
}
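A caller-side wiring sketch (illustrative, not in the diff): the service consumes the receiving half of an std mpsc channel and producers hand it TransactionInfo values. The default signature, empty wire bytes, and slot 0 below are placeholders.

use solana_runtime::{bank::Bank, bank_forks::BankForks};
use solana_sdk::signature::Signature;
use std::sync::{mpsc::channel, Arc, RwLock};

fn demo_send_transaction_service() {
    let tpu_address = "127.0.0.1:0".parse().unwrap();
    let bank_forks = Arc::new(RwLock::new(BankForks::new(Bank::default())));
    let (sender, receiver) = channel();
    let service = SendTransactionService::new(tpu_address, &bank_forks, receiver);

    // The retry thread re-sends queued transactions until they are rooted,
    // expired, or observed to have failed.
    sender
        .send(TransactionInfo::new(Signature::default(), vec![], 0))
        .unwrap();

    drop(sender); // disconnecting the channel lets the retry thread exit
    service.join().unwrap();
}
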
#[cfg(test)]
mod test {
    use super::*;
    use solana_sdk::{
        genesis_config::create_genesis_config, pubkey::Pubkey, signature::Signer,
        system_transaction,
    };
    use std::sync::mpsc::channel;

    #[test]
    fn service_exit() {
        let tpu_address = "127.0.0.1:0".parse().unwrap();
        let bank = Bank::default();
        let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
        let (sender, receiver) = channel();

        let send_tranaction_service =
            SendTransactionService::new(tpu_address, &bank_forks, receiver);

        drop(sender);
        send_tranaction_service.join().unwrap();
    }

    #[test]
    fn process_transactions() {
        let (genesis_config, mint_keypair) = create_genesis_config(4);
        let bank = Bank::new(&genesis_config);
        let bank_forks = Arc::new(RwLock::new(BankForks::new(bank)));
        let send_socket = UdpSocket::bind("0.0.0.0:0").unwrap();
        let tpu_address = "127.0.0.1:0".parse().unwrap();

        let root_bank = Arc::new(Bank::new_from_parent(
            &bank_forks.read().unwrap().working_bank(),
            &Pubkey::default(),
            1,
        ));
        let rooted_signature = root_bank
            .transfer(1, &mint_keypair, &mint_keypair.pubkey())
            .unwrap();

        let working_bank = Arc::new(Bank::new_from_parent(&root_bank, &Pubkey::default(), 2));

        let non_rooted_signature = working_bank
            .transfer(2, &mint_keypair, &mint_keypair.pubkey())
            .unwrap();

        let failed_signature = {
            let blockhash = working_bank.last_blockhash();
            let transaction =
                system_transaction::transfer(&mint_keypair, &Pubkey::default(), 1, blockhash);
            let signature = transaction.signatures[0];
            working_bank.process_transaction(&transaction).unwrap_err();
            signature
        };

        let mut transactions = HashMap::new();

        info!("Expired transactions are dropped..");
        transactions.insert(
            Signature::default(),
            TransactionInfo::new(Signature::default(), vec![], root_bank.slot() - 1),
        );
        let result = SendTransactionService::process_transactions(
            &working_bank,
            &root_bank,
            &send_socket,
            &tpu_address,
            &mut transactions,
        );
        assert!(transactions.is_empty());
        assert_eq!(
            result,
            ProcessTransactionsResult {
                expired: 1,
                ..ProcessTransactionsResult::default()
            }
        );

        info!("Rooted transactions are dropped...");
        transactions.insert(
            rooted_signature,
            TransactionInfo::new(rooted_signature, vec![], working_bank.slot()),
        );
        let result = SendTransactionService::process_transactions(
            &working_bank,
            &root_bank,
            &send_socket,
            &tpu_address,
            &mut transactions,
        );
        assert!(transactions.is_empty());
        assert_eq!(
            result,
            ProcessTransactionsResult {
                rooted: 1,
                ..ProcessTransactionsResult::default()
            }
        );

        info!("Failed transactions are dropped...");
        transactions.insert(
            failed_signature,
            TransactionInfo::new(failed_signature, vec![], working_bank.slot()),
        );
        let result = SendTransactionService::process_transactions(
            &working_bank,
            &root_bank,
            &send_socket,
            &tpu_address,
            &mut transactions,
        );
        assert!(transactions.is_empty());
        assert_eq!(
            result,
            ProcessTransactionsResult {
                failed: 1,
                ..ProcessTransactionsResult::default()
            }
        );

        info!("Non-rooted transactions are kept...");
        transactions.insert(
            non_rooted_signature,
            TransactionInfo::new(non_rooted_signature, vec![], working_bank.slot()),
        );
        let result = SendTransactionService::process_transactions(
            &working_bank,
            &root_bank,
            &send_socket,
            &tpu_address,
            &mut transactions,
        );
        assert_eq!(transactions.len(), 1);
        assert_eq!(
            result,
            ProcessTransactionsResult {
                retained: 1,
                ..ProcessTransactionsResult::default()
            }
        );
        transactions.clear();

        info!("Unknown transactions are retried...");
        transactions.insert(
            Signature::default(),
            TransactionInfo::new(Signature::default(), vec![], working_bank.slot()),
        );
        let result = SendTransactionService::process_transactions(
            &working_bank,
            &root_bank,
            &send_socket,
            &tpu_address,
            &mut transactions,
        );
        assert_eq!(transactions.len(), 1);
        assert_eq!(
            result,
            ProcessTransactionsResult {
                retried: 1,
                ..ProcessTransactionsResult::default()
            }
        );
    }
}
@@ -1,8 +1,8 @@
[package]
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
authors = ["Solana Maintainers <maintainers@solana.com>"]
edition = "2018"
name = "solana-bench-exchange"
version = "1.4.18"
version = "1.2.0"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -15,24 +15,24 @@ log = "0.4.8"
num-derive = "0.3"
num-traits = "0.2"
rand = "0.7.0"
rayon = "1.4.0"
serde_json = "1.0.56"
serde_yaml = "0.8.13"
solana-clap-utils = { path = "../clap-utils", version = "1.4.18" }
solana-core = { path = "../core", version = "1.4.18" }
solana-genesis = { path = "../genesis", version = "1.4.18" }
solana-client = { path = "../client", version = "1.4.18" }
solana-faucet = { path = "../faucet", version = "1.4.18" }
solana-exchange-program = { path = "../programs/exchange", version = "1.4.18" }
solana-logger = { path = "../logger", version = "1.4.18" }
solana-metrics = { path = "../metrics", version = "1.4.18" }
solana-net-utils = { path = "../net-utils", version = "1.4.18" }
solana-runtime = { path = "../runtime", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
solana-version = { path = "../version", version = "1.4.18" }
rayon = "1.3.0"
serde_json = "1.0.53"
serde_yaml = "0.8.12"
solana-clap-utils = { path = "../clap-utils", version = "1.2.0" }
solana-core = { path = "../core", version = "1.2.0" }
solana-genesis = { path = "../genesis", version = "1.2.0" }
solana-client = { path = "../client", version = "1.2.0" }
solana-faucet = { path = "../faucet", version = "1.2.0" }
solana-exchange-program = { path = "../programs/exchange", version = "1.2.0" }
solana-logger = { path = "../logger", version = "1.2.0" }
solana-metrics = { path = "../metrics", version = "1.2.0" }
solana-net-utils = { path = "../net-utils", version = "1.2.0" }
solana-runtime = { path = "../runtime", version = "1.2.0" }
solana-sdk = { path = "../sdk", version = "1.2.0" }
solana-version = { path = "../version", version = "1.2.0" }

[dev-dependencies]
solana-local-cluster = { path = "../local-cluster", version = "1.4.18" }
solana-local-cluster = { path = "../local-cluster", version = "1.2.0" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -14,7 +14,6 @@ use solana_metrics::datapoint_info;
use solana_sdk::{
    client::{Client, SyncClient},
    commitment_config::CommitmentConfig,
    message::Message,
    pubkey::Pubkey,
    signature::{Keypair, Signer},
    timing::{duration_as_ms, duration_as_s},
@@ -179,13 +178,19 @@ where

    info!("Generating {:?} account keys", total_keys);
    let mut account_keypairs = generate_keypairs(total_keys);
    let src_keypairs: Vec<_> = account_keypairs.drain(0..accounts_in_groups).collect();
    let src_keypairs: Vec<_> = account_keypairs
        .drain(0..accounts_in_groups)
        .map(|keypair| keypair)
        .collect();
    let src_pubkeys: Vec<Pubkey> = src_keypairs
        .iter()
        .map(|keypair| keypair.pubkey())
        .collect();

    let profit_keypairs: Vec<_> = account_keypairs.drain(0..accounts_in_groups).collect();
    let profit_keypairs: Vec<_> = account_keypairs
        .drain(0..accounts_in_groups)
        .map(|keypair| keypair)
        .collect();
    let profit_pubkeys: Vec<Pubkey> = profit_keypairs
        .iter()
        .map(|keypair| keypair.pubkey())
@@ -444,7 +449,7 @@ fn swapper<T>(
            }
            account_group = (account_group + 1) % account_groups as usize;

            let (blockhash, _fee_calculator, _last_valid_slot) = client
            let (blockhash, _fee_calculator) = client
                .get_recent_blockhash_with_commitment(CommitmentConfig::recent())
                .expect("Failed to get blockhash");
            let to_swap_txs: Vec<_> = to_swap
@@ -452,14 +457,16 @@ fn swapper<T>(
                .map(|(signer, swap, profit)| {
                    let s: &Keypair = &signer;
                    let owner = &signer.pubkey();
                    let instruction = exchange_instruction::swap_request(
                    Transaction::new_signed_instructions(
                        &[s],
                        &[exchange_instruction::swap_request(
                            owner,
                            &swap.0.pubkey,
                            &swap.1.pubkey,
                            &profit,
                    );
                    let message = Message::new(&[instruction], Some(&s.pubkey()));
                    Transaction::new(&[s], message, blockhash)
                        )],
                        blockhash,
                    )
                })
                .collect();

@@ -570,7 +577,7 @@ fn trader<T>(
        }
        account_group = (account_group + 1) % account_groups as usize;

        let (blockhash, _fee_calculator, _last_valid_slot) = client
        let (blockhash, _fee_calculator) = client
            .get_recent_blockhash_with_commitment(CommitmentConfig::recent())
            .expect("Failed to get blockhash");

@@ -581,7 +588,9 @@ fn trader<T>(
                    let owner_pubkey = &owner.pubkey();
                    let trade_pubkey = &trade.pubkey();
                    let space = mem::size_of::<ExchangeState>() as u64;
                    let instructions = [
                    Transaction::new_signed_instructions(
                        &[owner.as_ref(), trade],
                        &[
                            system_instruction::create_account(
                                owner_pubkey,
                                trade_pubkey,
@@ -598,9 +607,9 @@ fn trader<T>(
                                price,
                                src,
                            ),
                    ];
                    let message = Message::new(&instructions, Some(&owner_pubkey));
                    Transaction::new(&[owner.as_ref(), trade], message, blockhash)
                        ],
                        blockhash,
                    )
                })
                .collect();

@@ -738,9 +747,13 @@ pub fn fund_keys<T: Client>(client: &T, source: &Keypair, dests: &[Arc<Keypair>]
            let mut to_fund_txs: Vec<_> = chunk
                .par_iter()
                .map(|(k, m)| {
                    let instructions = system_instruction::transfer_many(&k.pubkey(), &m);
                    let message = Message::new(&instructions, Some(&k.pubkey()));
                    (k.clone(), Transaction::new_unsigned(message))
                    (
                        k.clone(),
                        Transaction::new_unsigned_instructions(&system_instruction::transfer_many(
                            &k.pubkey(),
                            &m,
                        )),
                    )
                })
                .collect();

@@ -763,7 +776,7 @@ pub fn fund_keys<T: Client>(client: &T, source: &Keypair, dests: &[Arc<Keypair>]
                    to_fund_txs.len(),
                );

                let (blockhash, _fee_calculator, _last_valid_slot) = client
                let (blockhash, _fee_calculator) = client
                    .get_recent_blockhash_with_commitment(CommitmentConfig::recent())
                    .expect("blockhash");
                to_fund_txs.par_iter_mut().for_each(|(k, tx)| {
@@ -835,10 +848,9 @@ pub fn create_token_accounts<T: Client>(
                    );
                    let request_ix =
                        exchange_instruction::account_request(owner_pubkey, &new_keypair.pubkey());
                    let message = Message::new(&[create_ix, request_ix], Some(&owner_pubkey));
                    (
                        (from_keypair, new_keypair),
                        Transaction::new_unsigned(message),
                        Transaction::new_unsigned_instructions(&[create_ix, request_ix]),
                    )
                })
                .collect();
@@ -856,7 +868,7 @@ pub fn create_token_accounts<T: Client>(

            let mut retries = 0;
            while !to_create_txs.is_empty() {
                let (blockhash, _fee_calculator, _last_valid_slot) = client
                let (blockhash, _fee_calculator) = client
                    .get_recent_blockhash_with_commitment(CommitmentConfig::recent())
                    .expect("Failed to get blockhash");
                to_create_txs
@@ -985,7 +997,7 @@ pub fn airdrop_lamports<T: Client>(

    let mut tries = 0;
    loop {
        let (blockhash, _fee_calculator, _last_valid_slot) = client
        let (blockhash, _fee_calculator) = client
            .get_recent_blockhash_with_commitment(CommitmentConfig::recent())
            .expect("Failed to get blockhash");
        match request_airdrop_transaction(&faucet_addr, &id.pubkey(), amount_to_drop, blockhash) {

@@ -39,7 +39,7 @@ fn test_exchange_local_cluster() {
    } = config;
    let accounts_in_groups = batch_size * account_groups;

    let cluster = LocalCluster::new(&mut ClusterConfig {
    let cluster = LocalCluster::new(&ClusterConfig {
        node_stakes: vec![100_000; NUM_NODES],
        cluster_lamports: 100_000_000_000_000,
        validator_configs: vec![ValidatorConfig::default(); NUM_NODES],
@@ -86,7 +86,7 @@ fn test_exchange_bank_client() {
    solana_logger::setup();
    let (genesis_config, identity) = create_genesis_config(100_000_000_000_000);
    let mut bank = Bank::new(&genesis_config);
    bank.add_builtin("exchange_program", id(), process_instruction);
    bank.add_builtin_program("exchange_program", id(), process_instruction);
    let clients = vec![BankClient::new(bank)];

    let mut config = Config::default();
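The hunks above swap between Transaction::new_signed_instructions and an explicit Message plus Transaction::new, depending on which side of the comparison you read. A reduced sketch of the explicit-Message form as it appears in this bench code; the payer, instruction, and blockhash parameters are placeholders supplied by the caller, and the SDK version is assumed to be the one providing Message::new with a payer argument.

use solana_sdk::{
    hash::Hash,
    instruction::Instruction,
    message::Message,
    signature::{Keypair, Signer},
    transaction::Transaction,
};

// Build a single-payer transaction the way the bench code above does:
// construct the Message explicitly, then sign it with the payer keypair.
fn build_signed_tx(payer: &Keypair, instruction: Instruction, blockhash: Hash) -> Transaction {
    let message = Message::new(&[instruction], Some(&payer.pubkey()));
    Transaction::new(&[payer], message, blockhash)
}
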
@@ -1,20 +1,19 @@
[package]
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
authors = ["Solana Maintainers <maintainers@solana.com>"]
edition = "2018"
name = "solana-bench-streamer"
version = "1.4.18"
version = "1.2.0"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
publish = false

[dependencies]
clap = "2.33.1"
solana-clap-utils = { path = "../clap-utils", version = "1.4.18" }
solana-streamer = { path = "../streamer", version = "1.4.18" }
solana-logger = { path = "../logger", version = "1.4.18" }
solana-net-utils = { path = "../net-utils", version = "1.4.18" }
solana-version = { path = "../version", version = "1.4.18" }
solana-clap-utils = { path = "../clap-utils", version = "1.2.0" }
solana-streamer = { path = "../streamer", version = "1.2.0" }
solana-logger = { path = "../logger", version = "1.2.0" }
solana-net-utils = { path = "../net-utils", version = "1.2.0" }
solana-version = { path = "../version", version = "1.2.0" }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -27,7 +27,7 @@ fn producer(addr: &SocketAddr, exit: Arc<AtomicBool>) -> JoinHandle<()> {
        let mut num = 0;
        for p in &msgs.packets {
            let a = p.meta.addr();
            assert!(p.meta.size <= PACKET_DATA_SIZE);
            assert!(p.meta.size < PACKET_DATA_SIZE);
            send.send_to(&p.data[..p.meta.size], &a).unwrap();
            num += 1;
        }
@@ -1,37 +1,41 @@
[package]
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
authors = ["Solana Maintainers <maintainers@solana.com>"]
edition = "2018"
name = "solana-bench-tps"
version = "1.4.18"
version = "1.2.0"
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
publish = false

[dependencies]
bincode = "1.3.1"
bincode = "1.2.1"
clap = "2.33.1"
log = "0.4.8"
rayon = "1.4.0"
serde_json = "1.0.56"
serde_yaml = "0.8.13"
solana-clap-utils = { path = "../clap-utils", version = "1.4.18" }
solana-core = { path = "../core", version = "1.4.18" }
solana-genesis = { path = "../genesis", version = "1.4.18" }
solana-client = { path = "../client", version = "1.4.18" }
solana-faucet = { path = "../faucet", version = "1.4.18" }
solana-logger = { path = "../logger", version = "1.4.18" }
solana-metrics = { path = "../metrics", version = "1.4.18" }
solana-measure = { path = "../measure", version = "1.4.18" }
solana-net-utils = { path = "../net-utils", version = "1.4.18" }
solana-runtime = { path = "../runtime", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
solana-version = { path = "../version", version = "1.4.18" }
rayon = "1.3.0"
serde_json = "1.0.53"
serde_yaml = "0.8.12"
solana-clap-utils = { path = "../clap-utils", version = "1.2.0" }
solana-core = { path = "../core", version = "1.2.0" }
solana-genesis = { path = "../genesis", version = "1.2.0" }
solana-client = { path = "../client", version = "1.2.0" }
solana-faucet = { path = "../faucet", version = "1.2.0" }
solana-librapay = { path = "../programs/librapay", version = "1.2.0", optional = true }
solana-logger = { path = "../logger", version = "1.2.0" }
solana-metrics = { path = "../metrics", version = "1.2.0" }
solana-measure = { path = "../measure", version = "1.2.0" }
solana-net-utils = { path = "../net-utils", version = "1.2.0" }
solana-runtime = { path = "../runtime", version = "1.2.0" }
solana-sdk = { path = "../sdk", version = "1.2.0" }
solana-move-loader-program = { path = "../programs/move_loader", version = "1.2.0", optional = true }
solana-version = { path = "../version", version = "1.2.0" }

[dev-dependencies]
serial_test = "0.4.0"
serial_test_derive = "0.4.0"
solana-local-cluster = { path = "../local-cluster", version = "1.4.18" }
solana-local-cluster = { path = "../local-cluster", version = "1.2.0" }

[features]
move = ["solana-librapay", "solana-move-loader-program"]

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
@@ -4,6 +4,8 @@ use rayon::prelude::*;
use solana_client::perf_utils::{sample_txs, SampleStats};
use solana_core::gen_keys::GenKeys;
use solana_faucet::faucet::request_airdrop_transaction;
#[cfg(feature = "move")]
use solana_librapay::{create_genesis, upload_mint_script, upload_payment_script};
use solana_measure::measure::Measure;
use solana_metrics::{self, datapoint_info};
use solana_sdk::{
@@ -12,7 +14,6 @@ use solana_sdk::{
    commitment_config::CommitmentConfig,
    fee_calculator::FeeCalculator,
    hash::Hash,
    message::Message,
    pubkey::Pubkey,
    signature::{Keypair, Signer},
    system_instruction, system_transaction,
@@ -35,6 +36,9 @@ use std::{
const MAX_TX_QUEUE_AGE: u64 =
    MAX_PROCESSING_AGE as u64 * DEFAULT_TICKS_PER_SLOT / DEFAULT_TICKS_PER_SECOND;

#[cfg(feature = "move")]
use solana_librapay::librapay_transaction;

pub const MAX_SPENDS_PER_TX: u64 = 4;

#[derive(Debug)]
@@ -46,12 +50,12 @@ pub type Result<T> = std::result::Result<T, BenchTpsError>;

pub type SharedTransactions = Arc<RwLock<VecDeque<Vec<(Transaction, u64)>>>>;

type LibraKeys = (Keypair, Pubkey, Pubkey, Vec<Keypair>);

fn get_recent_blockhash<T: Client>(client: &T) -> (Hash, FeeCalculator) {
    loop {
        match client.get_recent_blockhash_with_commitment(CommitmentConfig::recent()) {
            Ok((blockhash, fee_calculator, _last_valid_slot)) => {
                return (blockhash, fee_calculator)
            }
            Ok((blockhash, fee_calculator)) => return (blockhash, fee_calculator),
            Err(err) => {
                info!("Couldn't get recent blockhash: {:?}", err);
                sleep(Duration::from_secs(1));
@@ -115,6 +119,7 @@ fn generate_chunked_transfers(
    threads: usize,
    duration: Duration,
    sustained: bool,
    libra_args: Option<LibraKeys>,
) {
    // generate and send transactions for the specified duration
    let start = Instant::now();
@@ -129,6 +134,7 @@ fn generate_chunked_transfers(
            &dest_keypair_chunks[chunk_index],
            threads,
            reclaim_lamports_back_to_source_account,
            &libra_args,
        );

        // In sustained mode, overlap the transfers with generation. This has higher average
@@ -196,7 +202,12 @@ where
        .collect()
}

pub fn do_bench_tps<T>(client: Arc<T>, config: Config, gen_keypairs: Vec<Keypair>) -> u64
pub fn do_bench_tps<T>(
    client: Arc<T>,
    config: Config,
    gen_keypairs: Vec<Keypair>,
    libra_args: Option<LibraKeys>,
) -> u64
where
    T: 'static + Client + Send + Sync,
{
@@ -280,6 +291,7 @@ where
        threads,
        duration,
        sustained,
        libra_args,
    );

    // Stop the sampling threads so it will collect the stats
@@ -325,6 +337,52 @@ fn metrics_submit_lamport_balance(lamport_balance: u64) {
    );
}

#[cfg(feature = "move")]
fn generate_move_txs(
    source: &[&Keypair],
    dest: &VecDeque<&Keypair>,
    reclaim: bool,
    move_keypairs: &[Keypair],
    libra_pay_program_id: &Pubkey,
    libra_mint_id: &Pubkey,
    blockhash: &Hash,
) -> Vec<(Transaction, u64)> {
    let count = move_keypairs.len() / 2;
    let source_move = &move_keypairs[..count];
    let dest_move = &move_keypairs[count..];
    let pairs: Vec<_> = if !reclaim {
        source_move
            .iter()
            .zip(dest_move.iter())
            .zip(source.iter())
            .collect()
    } else {
        dest_move
            .iter()
            .zip(source_move.iter())
            .zip(dest.iter())
            .collect()
    };

    pairs
        .par_iter()
        .map(|((from, to), payer)| {
            (
                librapay_transaction::transfer(
                    libra_pay_program_id,
                    libra_mint_id,
                    &payer,
                    &from,
                    &to.pubkey(),
                    1,
                    *blockhash,
                ),
                timestamp(),
            )
        })
        .collect()
}
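A reduced illustration (not from the diff) of the #[cfg(feature = "move")] pattern used around generate_move_txs and inside generate_txs below: one implementation compiles when the feature is enabled, a fallback otherwise. The function name and return type here are hypothetical.

// Hypothetical example of compile-time selection behind a Cargo feature.
#[cfg(feature = "move")]
fn generate_batch() -> Vec<u64> {
    // Only compiled when the `move` feature is enabled.
    vec![1, 2, 3]
}

#[cfg(not(feature = "move"))]
fn generate_batch() -> Vec<u64> {
    // Default path when the feature is disabled.
    Vec::new()
}
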
fn generate_system_txs(
    source: &[&Keypair],
    dest: &VecDeque<&Keypair>,
@@ -355,6 +413,7 @@ fn generate_txs(
    dest: &VecDeque<&Keypair>,
    threads: usize,
    reclaim: bool,
    libra_args: &Option<LibraKeys>,
) {
    let blockhash = *blockhash.read().unwrap();
    let tx_count = source.len();
@@ -364,7 +423,33 @@ fn generate_txs(
    );
    let signing_start = Instant::now();

    let transactions = generate_system_txs(source, dest, reclaim, &blockhash);
    let transactions = if let Some((
        _libra_genesis_keypair,
        _libra_pay_program_id,
        _libra_mint_program_id,
        _libra_keys,
    )) = libra_args
    {
        #[cfg(not(feature = "move"))]
        {
            return;
        }

        #[cfg(feature = "move")]
        {
            generate_move_txs(
                source,
                dest,
                reclaim,
                &_libra_keys,
                _libra_pay_program_id,
                &_libra_genesis_keypair.pubkey(),
                &blockhash,
            )
        }
    } else {
        generate_system_txs(source, dest, reclaim, &blockhash)
    };

    let duration = signing_start.elapsed();
    let ns = duration.as_secs() * 1_000_000_000 + u64::from(duration.subsec_nanos());
@@ -565,9 +650,10 @@ impl<'a> FundingTransactions<'a> for Vec<(&'a Keypair, Transaction)> {
        let to_fund_txs: Vec<(&Keypair, Transaction)> = to_fund
            .par_iter()
            .map(|(k, t)| {
                let instructions = system_instruction::transfer_many(&k.pubkey(), &t);
                let message = Message::new(&instructions, Some(&k.pubkey()));
                (*k, Transaction::new_unsigned(message))
                let tx = Transaction::new_unsigned_instructions(
                    &system_instruction::transfer_many(&k.pubkey(), &t),
                );
                (*k, tx)
            })
            .collect();
        make_txs.stop();
@@ -866,13 +952,181 @@ pub fn generate_keypairs(seed_keypair: &Keypair, count: u64) -> (Vec<Keypair>, u
    (rnd.gen_n_keypairs(total_keys), extra)
}

#[cfg(feature = "move")]
fn fund_move_keys<T: Client>(
    client: &T,
    funding_key: &Keypair,
    keypairs: &[Keypair],
    total: u64,
    libra_pay_program_id: &Pubkey,
    libra_mint_program_id: &Pubkey,
    libra_genesis_key: &Keypair,
) {
    let (mut blockhash, _fee_calculator) = get_recent_blockhash(client);

    info!("creating the libra funding account..");
    let libra_funding_key = Keypair::new();
    let tx = librapay_transaction::create_account(funding_key, &libra_funding_key, 1, blockhash);
    client
        .send_message(&[funding_key, &libra_funding_key], tx.message)
        .unwrap();

    info!("minting to funding keypair");
    let tx = librapay_transaction::mint_tokens(
        &libra_mint_program_id,
        funding_key,
        libra_genesis_key,
        &libra_funding_key.pubkey(),
        total,
        blockhash,
    );
    client
        .send_message(&[funding_key, libra_genesis_key], tx.message)
        .unwrap();

    info!("creating {} move accounts...", keypairs.len());
    let total_len = keypairs.len();
    let create_len = 5;
    let mut funding_time = Measure::start("funding_time");
    for (i, keys) in keypairs.chunks(create_len).enumerate() {
        if client
            .get_balance_with_commitment(&keys[0].pubkey(), CommitmentConfig::recent())
            .unwrap_or(0)
            > 0
        {
            // already created these accounts.
            break;
        }

        let keypairs: Vec<_> = keys.iter().map(|k| k).collect();
        let tx = librapay_transaction::create_accounts(funding_key, &keypairs, 1, blockhash);
        let ser_size = bincode::serialized_size(&tx).unwrap();
        let mut keys = vec![funding_key];
        keys.extend(&keypairs);
        client.send_message(&keys, tx.message).unwrap();

        if i % 10 == 0 {
            info!(
                "created {} accounts of {} (size {})",
                i,
                total_len / create_len,
                ser_size,
            );
        }
    }

    const NUM_FUNDING_KEYS: usize = 10;
    let funding_keys: Vec<_> = (0..NUM_FUNDING_KEYS).map(|_| Keypair::new()).collect();
    let pubkey_amounts: Vec<_> = funding_keys
        .iter()
        .map(|key| (key.pubkey(), total / NUM_FUNDING_KEYS as u64))
        .collect();
    let tx = Transaction::new_signed_instructions(
        &[funding_key],
        &system_instruction::transfer_many(&funding_key.pubkey(), &pubkey_amounts),
        blockhash,
    );
    client.send_message(&[funding_key], tx.message).unwrap();
    let mut balance = 0;
    for _ in 0..20 {
        if let Ok(balance_) = client
            .get_balance_with_commitment(&funding_keys[0].pubkey(), CommitmentConfig::recent())
        {
            if balance_ > 0 {
                balance = balance_;
                break;
            }
        }
        sleep(Duration::from_millis(100));
    }
    assert!(balance > 0);
    info!(
        "funded multiple funding accounts with {:?} lanports",
        balance
    );

    let libra_funding_keys: Vec<_> = (0..NUM_FUNDING_KEYS).map(|_| Keypair::new()).collect();
    for (i, key) in libra_funding_keys.iter().enumerate() {
        let tx = librapay_transaction::create_account(&funding_keys[i], &key, 1, blockhash);
        client
            .send_message(&[&funding_keys[i], &key], tx.message)
            .unwrap();

        let tx = librapay_transaction::transfer(
            libra_pay_program_id,
            &libra_genesis_key.pubkey(),
            &funding_keys[i],
            &libra_funding_key,
            &key.pubkey(),
            total / NUM_FUNDING_KEYS as u64,
            blockhash,
        );
        client
            .send_message(&[&funding_keys[i], &libra_funding_key], tx.message)
            .unwrap();

        info!("funded libra funding key {}", i);
    }

    let keypair_count = keypairs.len();
    let amount = total / (keypair_count as u64);
    for (i, keys) in keypairs[..keypair_count]
        .chunks(NUM_FUNDING_KEYS)
        .enumerate()
    {
        for (j, key) in keys.iter().enumerate() {
            let tx = librapay_transaction::transfer(
                libra_pay_program_id,
                &libra_genesis_key.pubkey(),
                &funding_keys[j],
                &libra_funding_keys[j],
                &key.pubkey(),
                amount,
                blockhash,
            );

            let _sig = client
			
		||||
                .async_send_transaction(tx.clone())
 | 
			
		||||
                .expect("create_account in generate_and_fund_keypairs");
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        for (j, key) in keys.iter().enumerate() {
 | 
			
		||||
            let mut times = 0;
 | 
			
		||||
            loop {
 | 
			
		||||
                let balance =
 | 
			
		||||
                    librapay_transaction::get_libra_balance(client, &key.pubkey()).unwrap();
 | 
			
		||||
                if balance >= amount {
 | 
			
		||||
                    break;
 | 
			
		||||
                } else if times > 20 {
 | 
			
		||||
                    info!("timed out.. {} key: {} balance: {}", i, j, balance);
 | 
			
		||||
                    break;
 | 
			
		||||
                } else {
 | 
			
		||||
                    times += 1;
 | 
			
		||||
                    sleep(Duration::from_millis(100));
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        info!(
 | 
			
		||||
            "funded group {} of {}",
 | 
			
		||||
            i + 1,
 | 
			
		||||
            keypairs.len() / NUM_FUNDING_KEYS
 | 
			
		||||
        );
 | 
			
		||||
        blockhash = get_recent_blockhash(client).0;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    funding_time.stop();
 | 
			
		||||
    info!("done funding keys, took {} ms", funding_time.as_ms());
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn generate_and_fund_keypairs<T: 'static + Client + Send + Sync>(
    client: Arc<T>,
    faucet_addr: Option<SocketAddr>,
    funding_key: &Keypair,
    keypair_count: usize,
    lamports_per_account: u64,
) -> Result<Vec<Keypair>> {
    use_move: bool,
) -> Result<(Vec<Keypair>, Option<LibraKeys>)> {
    info!("Creating {} keypairs...", keypair_count);
    let (mut keypairs, extra) = generate_keypairs(funding_key, keypair_count as u64);
    info!("Get lamports...");
@@ -885,6 +1139,12 @@ pub fn generate_and_fund_keypairs<T: 'static + Client + Send + Sync>(
    let last_key = keypairs[keypair_count - 1].pubkey();
    let last_keypair_balance = client.get_balance(&last_key).unwrap_or(0);

    #[cfg(feature = "move")]
    let mut move_keypairs_ret = None;

    #[cfg(not(feature = "move"))]
    let move_keypairs_ret = None;

    // Repeated runs will eat up keypair balances from transaction fees. In order to quickly
    //   start another bench-tps run without re-funding all of the keypairs, check if the
    //   keypairs still have at least 80% of the expected funds. That should be enough to
@@ -895,7 +1155,10 @@ pub fn generate_and_fund_keypairs<T: 'static + Client + Send + Sync>(
        let max_fee = fee_rate_governor.max_lamports_per_signature;
        let extra_fees = extra * max_fee;
        let total_keypairs = keypairs.len() as u64 + 1; // Add one for funding keypair
        let total = lamports_per_account * total_keypairs + extra_fees;
        let mut total = lamports_per_account * total_keypairs + extra_fees;
        if use_move {
            total *= 3;
        }

        let funding_key_balance = client.get_balance(&funding_key.pubkey()).unwrap_or(0);
        info!(
@@ -907,6 +1170,40 @@ pub fn generate_and_fund_keypairs<T: 'static + Client + Send + Sync>(
            airdrop_lamports(client.as_ref(), &faucet_addr.unwrap(), funding_key, total)?;
        }

        #[cfg(feature = "move")]
        {
            if use_move {
                let libra_genesis_keypair =
                    create_genesis(&funding_key, client.as_ref(), 10_000_000);
                let libra_mint_program_id = upload_mint_script(&funding_key, client.as_ref());
                let libra_pay_program_id = upload_payment_script(&funding_key, client.as_ref());

                // Generate another set of keypairs for move accounts.
                // Still fund the solana ones which will be used for fees.
                let seed = [0u8; 32];
                let mut rnd = GenKeys::new(seed);
                let move_keypairs = rnd.gen_n_keypairs(keypair_count as u64);
                fund_move_keys(
                    client.as_ref(),
                    funding_key,
                    &move_keypairs,
                    total / 3,
                    &libra_pay_program_id,
                    &libra_mint_program_id,
                    &libra_genesis_keypair,
                );
                move_keypairs_ret = Some((
                    libra_genesis_keypair,
                    libra_pay_program_id,
                    libra_mint_program_id,
                    move_keypairs,
                ));

                // Give solana keys 1/3 and move keys 1/3 the lamports. Keep 1/3 for fees.
                total /= 3;
            }
        }

        fund_keys(
            client,
            funding_key,
@@ -920,7 +1217,7 @@ pub fn generate_and_fund_keypairs<T: 'static + Client + Send + Sync>(
    // 'generate_keypairs' generates extra keys to be able to have size-aligned funding batches for fund_keys.
    keypairs.truncate(keypair_count);

    Ok(keypairs)
    Ok((keypairs, move_keypairs_ret))
}

#[cfg(test)]
@@ -944,11 +1241,11 @@ mod tests {
        config.duration = Duration::from_secs(5);

        let keypair_count = config.tx_count * config.keypair_multiplier;
        let keypairs =
            generate_and_fund_keypairs(client.clone(), None, &config.id, keypair_count, 20)
        let (keypairs, _move_keypairs) =
            generate_and_fund_keypairs(client.clone(), None, &config.id, keypair_count, 20, false)
                .unwrap();

        do_bench_tps(client, config, keypairs);
        do_bench_tps(client, config, keypairs, None);
    }

    #[test]
@@ -959,8 +1256,9 @@ mod tests {
        let keypair_count = 20;
        let lamports = 20;

        let keypairs =
            generate_and_fund_keypairs(client.clone(), None, &id, keypair_count, lamports).unwrap();
        let (keypairs, _move_keypairs) =
            generate_and_fund_keypairs(client.clone(), None, &id, keypair_count, lamports, false)
                .unwrap();

        for kp in &keypairs {
            assert_eq!(
@@ -982,8 +1280,9 @@ mod tests {
        let keypair_count = 20;
        let lamports = 20;

        let keypairs =
            generate_and_fund_keypairs(client.clone(), None, &id, keypair_count, lamports).unwrap();
        let (keypairs, _move_keypairs) =
            generate_and_fund_keypairs(client.clone(), None, &id, keypair_count, lamports, false)
                .unwrap();

        for kp in &keypairs {
            assert_eq!(client.get_balance(&kp.pubkey()).unwrap(), lamports);

@@ -1,10 +1,7 @@
use clap::{crate_description, crate_name, App, Arg, ArgMatches};
use solana_faucet::faucet::FAUCET_PORT;
use solana_sdk::fee_calculator::FeeRateGovernor;
use solana_sdk::{
    pubkey::Pubkey,
    signature::{read_keypair_file, Keypair},
};
use solana_sdk::signature::{read_keypair_file, Keypair};
use std::{net::SocketAddr, process::exit, time::Duration};

const NUM_LAMPORTS_PER_ACCOUNT_DEFAULT: u64 = solana_sdk::native_token::LAMPORTS_PER_SOL;
@@ -26,9 +23,9 @@ pub struct Config {
    pub read_from_client_file: bool,
    pub target_lamports_per_signature: u64,
    pub multi_client: bool,
    pub use_move: bool,
    pub num_lamports_per_account: u64,
    pub target_slots_per_epoch: u64,
    pub target_node: Option<Pubkey>,
}

impl Default for Config {
@@ -49,9 +46,9 @@ impl Default for Config {
            read_from_client_file: false,
            target_lamports_per_signature: FeeRateGovernor::default().target_lamports_per_signature,
            multi_client: true,
            use_move: false,
            num_lamports_per_account: NUM_LAMPORTS_PER_ACCOUNT_DEFAULT,
            target_slots_per_epoch: 0,
            target_node: None,
        }
    }
}
@@ -112,19 +109,16 @@ pub fn build_args<'a, 'b>(version: &'b str) -> App<'a, 'b> {
                .long("sustained")
                .help("Use sustained performance mode vs. peak mode. This overlaps the tx generation with transfers."),
        )
        .arg(
            Arg::with_name("use-move")
                .long("use-move")
                .help("Use Move language transactions to perform transfers."),
        )
        .arg(
            Arg::with_name("no-multi-client")
                .long("no-multi-client")
                .help("Disable multi-client support, only transact with the entrypoint."),
        )
        .arg(
            Arg::with_name("target_node")
                .long("target-node")
                .requires("no-multi-client")
                .takes_value(true)
                .value_name("PUBKEY")
                .help("Specify an exact node to send transactions to."),
        )
        .arg(
            Arg::with_name("tx_count")
                .long("tx_count")
@@ -269,10 +263,8 @@ pub fn extract_args<'a>(matches: &ArgMatches<'a>) -> Config {
        args.target_lamports_per_signature = v.to_string().parse().expect("can't parse lamports");
    }

    args.use_move = matches.is_present("use-move");
    args.multi_client = !matches.is_present("no-multi-client");
    args.target_node = matches
        .value_of("target_node")
        .map(|target_str| target_str.parse().unwrap());

    if let Some(v) = matches.value_of("num_lamports_per_account") {
        args.num_lamports_per_account = v.to_string().parse().expect("can't parse lamports");

@@ -29,9 +29,9 @@ fn main() {
        write_to_client_file,
        read_from_client_file,
        target_lamports_per_signature,
        use_move,
        multi_client,
        num_lamports_per_account,
        target_node,
        ..
    } = &cli_config;

@@ -82,24 +82,11 @@ fn main() {
            exit(1);
        }
        Arc::new(client)
    } else if let Some(target_node) = target_node {
        info!("Searching for target_node: {:?}", target_node);
        let mut target_client = None;
        for node in nodes {
            if node.id == *target_node {
                target_client = Some(Arc::new(get_client(&[node])));
                break;
            }
        }
        target_client.unwrap_or_else(|| {
            eprintln!("Target node {} not found", target_node);
            exit(1);
        })
    } else {
        Arc::new(get_client(&nodes))
    };

    let keypairs = if *read_from_client_file {
    let (keypairs, move_keypairs) = if *read_from_client_file && !use_move {
        let path = Path::new(&client_ids_and_stake_file);
        let file = File::open(path).unwrap();

@@ -128,8 +115,8 @@ fn main() {
        // Sort keypairs so that do_bench_tps() uses the same subset of accounts for each run.
        // This prevents the amount of storage needed for bench-tps accounts from creeping up
        // across multiple runs.
        keypairs.sort_by_key(|x| x.pubkey().to_string());
        keypairs
        keypairs.sort_by(|x, y| x.pubkey().to_string().cmp(&y.pubkey().to_string()));
        (keypairs, None)
    } else {
        generate_and_fund_keypairs(
            client.clone(),
@@ -137,6 +124,7 @@ fn main() {
            &id,
            keypair_count,
            *num_lamports_per_account,
            *use_move,
        )
        .unwrap_or_else(|e| {
            eprintln!("Error could not fund keys: {:?}", e);
@@ -144,5 +132,5 @@ fn main() {
        })
    };

    do_bench_tps(client, cli_config, keypairs);
    do_bench_tps(client, cli_config, keypairs, move_keypairs);
}

@@ -6,16 +6,22 @@ use solana_core::cluster_info::VALIDATOR_PORT_RANGE;
use solana_core::validator::ValidatorConfig;
use solana_faucet::faucet::run_local_faucet;
use solana_local_cluster::local_cluster::{ClusterConfig, LocalCluster};
#[cfg(feature = "move")]
use solana_sdk::move_loader::solana_move_loader_program;
use solana_sdk::signature::{Keypair, Signer};
use std::sync::{mpsc::channel, Arc};
use std::time::Duration;

fn test_bench_tps_local_cluster(config: Config) {
    #[cfg(feature = "move")]
    let native_instruction_processors = vec![solana_move_loader_program()];

    #[cfg(not(feature = "move"))]
    let native_instruction_processors = vec![];

    solana_logger::setup();
    const NUM_NODES: usize = 1;
    let cluster = LocalCluster::new(&mut ClusterConfig {
    let cluster = LocalCluster::new(&ClusterConfig {
        node_stakes: vec![999_990; NUM_NODES],
        cluster_lamports: 200_000_000,
        validator_configs: vec![ValidatorConfig::default(); NUM_NODES],
@@ -42,16 +48,17 @@ fn test_bench_tps_local_cluster(config: Config) {
    let lamports_per_account = 100;

    let keypair_count = config.tx_count * config.keypair_multiplier;
    let keypairs = generate_and_fund_keypairs(
    let (keypairs, move_keypairs) = generate_and_fund_keypairs(
        client.clone(),
        Some(faucet_addr),
        &config.id,
        keypair_count,
        lamports_per_account,
        config.use_move,
    )
    .unwrap();

    let _total = do_bench_tps(client, config, keypairs);
    let _total = do_bench_tps(client, config, keypairs, move_keypairs);

    #[cfg(not(debug_assertions))]
    assert!(_total > 100);
@@ -66,3 +73,14 @@ fn test_bench_tps_local_cluster_solana() {

    test_bench_tps_local_cluster(config);
}

#[test]
#[serial]
fn test_bench_tps_local_cluster_move() {
    let mut config = Config::default();
    config.tx_count = 100;
    config.duration = Duration::from_secs(10);
    config.use_move = true;

    test_bench_tps_local_cluster(config);
}

cargo (31-line wrapper script, deleted)
@@ -1,31 +0,0 @@
#!/usr/bin/env bash

# shellcheck source=ci/rust-version.sh
here=$(dirname "$0")

source "${here}"/ci/rust-version.sh all

toolchain=
case "$1" in
  stable)
    # shellcheck disable=SC2054 # rust_stable is sourced from rust-version.sh
    toolchain="$rust_stable"
    shift
    ;;
  nightly)
    # shellcheck disable=SC2054 # rust_nightly is sourced from rust-version.sh
    toolchain="$rust_nightly"
    shift
    ;;
  +*)
    toolchain="${1#+}"
    shift
    ;;
  *)
    # shellcheck disable=SC2054 # rust_stable is sourced from rust-version.sh
    toolchain="$rust_stable"
    ;;
esac

set -x
exec cargo "+${toolchain}" "${@}"
@@ -1,13 +0,0 @@
#!/usr/bin/env bash

here=$(dirname "$0")

maybe_bpf_sdk="--bpf-sdk $here/sdk/bpf"
for a in "$@"; do
  if [[ $a = --bpf-sdk ]]; then
    maybe_bpf_sdk=
  fi
done

set -x
exec "$here"/cargo run --manifest-path "$here"/sdk/cargo-build-bpf/Cargo.toml -- $maybe_bpf_sdk "$@"
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

here=$(dirname "$0")

maybe_bpf_sdk="--bpf-sdk $here/sdk/bpf"
for a in "$@"; do
  if [[ $a = --bpf-sdk ]]; then
    maybe_bpf_sdk=
  fi
done

export CARGO_BUILD_BPF="$here"/cargo-build-bpf
set -x
exec "$here"/cargo run --manifest-path "$here"/sdk/cargo-test-bpf/Cargo.toml -- $maybe_bpf_sdk "$@"

ci/affects-files.sh (new executable file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env bash
#
# Checks if a CI build affects one or more path patterns.  Each command-line
# argument is checked in series.
#
# Bash regular expressions are permitted in the pattern:
#     ./affects-files.sh .rs$    -- any file or directory ending in .rs
#     ./affects-files.sh .rs     -- also matches foo.rs.bar
#     ./affects-files.sh ^snap/  -- anything under the snap/ subdirectory
#     ./affects-files.sh snap/   -- also matches foo/snap/
# Any pattern starting with the ! character will be negated:
#     ./affects-files.sh !^docs/  -- anything *not* under the docs/ subdirectory
#
set -e
cd "$(dirname "$0")"/..

if [[ -n $CI_PULL_REQUEST ]]; then
  affectedFiles="$(buildkite-agent meta-data get affected_files)"
  echo "Affected files in this PR: $affectedFiles"

  IFS=':' read -ra files <<< "$affectedFiles"
  for pattern in "$@"; do
    if [[ ${pattern:0:1} = "!" ]]; then
      for file in "${files[@]}"; do
        if [[ ! $file =~ ${pattern:1} ]]; then
          exit 0
        fi
      done
    else
      for file in "${files[@]}"; do
        if [[ $file =~ $pattern ]]; then
          exit 0
        fi
      done
    fi
  done

  exit 1
fi

# affected_files metadata is not currently available for non-PR builds, so assume
# the worse (affected)
exit 0
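As a usage sketch (the patterns below are hypothetical examples, not part of this change), a CI step can gate work on this script's exit status, which is 0 when any pattern matches an affected file and 1 otherwise:

# Run the Rust test step only when Rust sources or Cargo manifests changed
if ci/affects-files.sh .rs$ Cargo.lock$ Cargo.toml$; then
  echo "Rust files affected; running tests"
else
  echo "no relevant changes; skipping"
fi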
@@ -1,278 +0,0 @@
#!/usr/bin/env bash
#
# Builds a buildkite pipeline based on the environment variables
#

set -e
cd "$(dirname "$0")"/..

output_file=${1:-/dev/stderr}

if [[ -n $CI_PULL_REQUEST ]]; then
  IFS=':' read -ra affected_files <<< "$(buildkite-agent meta-data get affected_files)"
  if [[ ${#affected_files[*]} -eq 0 ]]; then
    echo "Unable to determine the files affected by this PR"
    exit 1
  fi
else
  affected_files=()
fi

annotate() {
  if [[ -n $BUILDKITE ]]; then
    buildkite-agent annotate "$@"
  fi
}

# Checks if a CI pull request affects one or more path patterns.  Each
# pattern argument is checked in series. If one of them found to be affected,
# return immediately as such.
#
# Bash regular expressions are permitted in the pattern:
#     affects .rs$    -- any file or directory ending in .rs
#     affects .rs     -- also matches foo.rs.bar
#     affects ^snap/  -- anything under the snap/ subdirectory
#     affects snap/   -- also matches foo/snap/
# Any pattern starting with the ! character will be negated:
#     affects !^docs/  -- anything *not* under the docs/ subdirectory
#
affects() {
  if [[ -z $CI_PULL_REQUEST ]]; then
    # affected_files metadata is not currently available for non-PR builds so assume
    # the worse (affected)
    return 0
  fi
  # Assume everyting needs to be tested when any Dockerfile changes
  for pattern in ^ci/docker-rust/Dockerfile ^ci/docker-rust-nightly/Dockerfile "$@"; do
    if [[ ${pattern:0:1} = "!" ]]; then
      for file in "${affected_files[@]}"; do
        if [[ ! $file =~ ${pattern:1} ]]; then
          return 0 # affected
        fi
      done
    else
      for file in "${affected_files[@]}"; do
        if [[ $file =~ $pattern ]]; then
          return 0 # affected
        fi
      done
    fi
  done

  return 1 # not affected
}


# Checks if a CI pull request affects anything other than the provided path patterns
#
# Syntax is the same as `affects()` except that the negation prefix is not
# supported
#
affects_other_than() {
  if [[ -z $CI_PULL_REQUEST ]]; then
    # affected_files metadata is not currently available for non-PR builds so assume
    # the worse (affected)
    return 0
  fi

  for file in "${affected_files[@]}"; do
    declare matched=false
    for pattern in "$@"; do
        if [[ $file =~ $pattern ]]; then
          matched=true
        fi
    done
    if ! $matched; then
      return 0 # affected
    fi
  done

  return 1 # not affected
}


start_pipeline() {
  echo "# $*" > "$output_file"
  echo "steps:" >> "$output_file"
}

command_step() {
  cat >> "$output_file" <<EOF
  - name: "$1"
    command: "$2"
    timeout_in_minutes: $3
    artifact_paths: "log-*.txt"
EOF
}


trigger_secondary_step() {
  cat  >> "$output_file" <<"EOF"
  - trigger: "solana-secondary"
    branches: "!pull/*"
    async: true
    build:
      message: "${BUILDKITE_MESSAGE}"
      commit: "${BUILDKITE_COMMIT}"
      branch: "${BUILDKITE_BRANCH}"
      env:
        TRIGGERED_BUILDKITE_TAG: "${BUILDKITE_TAG}"
EOF
}

wait_step() {
  echo "  - wait" >> "$output_file"
}

all_test_steps() {
  command_step checks ". ci/rust-version.sh; ci/docker-run.sh \$\$rust_nightly_docker_image ci/test-checks.sh" 20
  wait_step

  # Coverage...
  if affects \
             .rs$ \
             Cargo.lock$ \
             Cargo.toml$ \
             ^ci/rust-version.sh \
             ^ci/test-coverage.sh \
             ^scripts/coverage.sh \
      ; then
    command_step coverage ". ci/rust-version.sh; ci/docker-run.sh \$\$rust_nightly_docker_image ci/test-coverage.sh" 30
    wait_step
  else
    annotate --style info --context test-coverage \
      "Coverage skipped as no .rs files were modified"
  fi

  # Full test suite
  command_step stable ". ci/rust-version.sh; ci/docker-run.sh \$\$rust_stable_docker_image ci/test-stable.sh" 60
  wait_step

  # Perf test suite
  if affects \
             .rs$ \
             Cargo.lock$ \
             Cargo.toml$ \
             ^ci/rust-version.sh \
             ^ci/test-stable-perf.sh \
             ^ci/test-stable.sh \
             ^ci/test-local-cluster.sh \
             ^core/build.rs \
             ^fetch-perf-libs.sh \
             ^programs/ \
             ^sdk/ \
      ; then
    cat >> "$output_file" <<"EOF"
  - command: "ci/test-stable-perf.sh"
    name: "stable-perf"
    timeout_in_minutes: 40
    artifact_paths: "log-*.txt"
    agents:
      - "queue=cuda"
EOF
  else
    annotate --style info \
      "Stable-perf skipped as no relevant files were modified"
  fi

  # Downstream backwards compatibility
  if affects \
             .rs$ \
             Cargo.lock$ \
             Cargo.toml$ \
             ^ci/rust-version.sh \
             ^ci/test-stable-perf.sh \
             ^ci/test-stable.sh \
             ^ci/test-local-cluster.sh \
             ^core/build.rs \
             ^fetch-perf-libs.sh \
             ^programs/ \
             ^sdk/ \
             ^scripts/build-downstream-projects.sh \
      ; then
    cat >> "$output_file" <<"EOF"
  - command: "scripts/build-downstream-projects.sh"
    name: "downstream-projects"
    timeout_in_minutes: 30
EOF
  else
    annotate --style info \
      "downstream-projects skipped as no relevant files were modified"
  fi
  # Benches...
  if affects \
             .rs$ \
             Cargo.lock$ \
             Cargo.toml$ \
             ^ci/rust-version.sh \
             ^ci/test-coverage.sh \
             ^ci/test-bench.sh \
      ; then
    command_step bench "ci/test-bench.sh" 30
  else
    annotate --style info --context test-bench \
      "Bench skipped as no .rs files were modified"
  fi

  command_step "local-cluster" \
    ". ci/rust-version.sh; ci/docker-run.sh \$\$rust_stable_docker_image ci/test-local-cluster.sh" \
    45
}

pull_or_push_steps() {
  command_step sanity "ci/test-sanity.sh" 5
  wait_step

  # Check for any .sh file changes
  if affects .sh$; then
    command_step shellcheck "ci/shellcheck.sh" 5
    wait_step
  fi

  # Run the full test suite by default, skipping only if modifications are local
  # to some particular areas of the tree
  if affects_other_than ^.buildkite ^.mergify .md$ ^docs/ ^web3.js/ ^explorer/ ^.gitbook; then
    all_test_steps
  fi

  # web3.js, explorer and docs changes run on Travis...
}


if [[ -n $BUILDKITE_TAG ]]; then
  start_pipeline "Tag pipeline for $BUILDKITE_TAG"

  annotate --style info --context release-tag \
    "https://github.com/solana-labs/solana/releases/$BUILDKITE_TAG"

  # Jump directly to the secondary build to publish release artifacts quickly
  trigger_secondary_step
  exit 0
fi


if [[ $BUILDKITE_BRANCH =~ ^pull ]]; then
  echo "+++ Affected files in this PR"
  for file in "${affected_files[@]}"; do
    echo "- $file"
  done

  start_pipeline "Pull request pipeline for $BUILDKITE_BRANCH"

  # Add helpful link back to the corresponding Github Pull Request
  annotate --style info --context pr-backlink \
    "Github Pull Request: https://github.com/solana-labs/solana/$BUILDKITE_BRANCH"

  if [[ $GITHUB_USER = "dependabot-preview[bot]" ]]; then
    command_step dependabot "ci/dependabot-pr.sh" 5
    wait_step
  fi
  pull_or_push_steps
  exit 0
fi

start_pipeline "Push pipeline for ${BUILDKITE_BRANCH:-?unknown branch?}"
pull_or_push_steps
wait_step
trigger_secondary_step
exit 0

ci/buildkite-release.yml (new file, 15 lines)
@@ -0,0 +1,15 @@
# Build steps that run on a release tag
#
# All the steps in `buildkite.yml` are skipped and we jump directly to the
# secondary build steps since it's assumed the commit that was tagged is known
# to be good so there's no need to rebuild and retest it.
steps:
  - trigger: "solana-secondary"
    branches: "!pull/*"
    async: true
    build:
      message: "${BUILDKITE_MESSAGE}"
      commit: "${BUILDKITE_COMMIT}"
      branch: "${BUILDKITE_BRANCH}"
      env:
        TRIGGERED_BUILDKITE_TAG: "${BUILDKITE_TAG}"
@@ -5,6 +5,9 @@ steps:
  - command: "ci/publish-tarball.sh"
    timeout_in_minutes: 60
    name: "publish tarball"
  - command: "ci/publish-docs.sh"
    timeout_in_minutes: 15
    name: "publish docs"
  - command: "ci/publish-bpf-sdk.sh"
    timeout_in_minutes: 5
    name: "publish bpf sdk"
@@ -16,3 +19,6 @@ steps:
    timeout_in_minutes: 240
    name: "publish crate"
    branches: "!master"
    #  - command: ". ci/rust-version.sh; ci/docker-run.sh $$rust_stable_docker_image ci/test-move.sh"
    #    name: "move"
    #    timeout_in_minutes: 20

ci/buildkite-tests.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
# These steps are conditionally triggered by ci/buildkite.yml when files
# other than those in docs/ are modified

steps:
  - command: ". ci/rust-version.sh; ci/docker-run.sh $$rust_nightly_docker_image ci/test-coverage.sh"
    name: "coverage"
    timeout_in_minutes: 30
  - wait
  - command: ". ci/rust-version.sh; ci/docker-run.sh $$rust_stable_docker_image ci/test-stable.sh"
    name: "stable"
    timeout_in_minutes: 60
    artifact_paths: "log-*.txt"
  - wait
  - command: "ci/test-stable-perf.sh"
    name: "stable-perf"
    timeout_in_minutes: 40
    artifact_paths: "log-*.txt"
    agents:
      - "queue=cuda"
  - command: "ci/test-bench.sh"
    name: "bench"
    timeout_in_minutes: 30
  - command: ". ci/rust-version.sh; ci/docker-run.sh $$rust_stable_docker_image ci/test-local-cluster.sh"
    name: "local-cluster"
    timeout_in_minutes: 45
    artifact_paths: "log-*.txt"

ci/buildkite.yml (new file, 38 lines)
@@ -0,0 +1,38 @@
# Build steps that run on pushes and pull requests.
# If files other than those in docs/ were modified, this will be followed up by
# ci/buildkite-tests.yml
#
# Release tags use buildkite-release.yml instead

steps:
  - command: "ci/dependabot-pr.sh"
    name: "dependabot"
    timeout_in_minutes: 5
    if: build.env("GITHUB_USER") == "dependabot-preview[bot]"

  - wait

  - command: ". ci/rust-version.sh; ci/docker-run.sh $$rust_nightly_docker_image ci/test-checks.sh"
    name: "checks"
    timeout_in_minutes: 20
  - command: "ci/shellcheck.sh"
    name: "shellcheck"
    timeout_in_minutes: 5

  - wait

  - command: "ci/maybe-trigger-tests.sh"
    name: "maybe-trigger-tests"
    timeout_in_minutes: 2

  - wait

  - trigger: "solana-secondary"
    branches: "!pull/*"
    async: true
    build:
      message: "${BUILDKITE_MESSAGE}"
      commit: "${BUILDKITE_COMMIT}"
      branch: "${BUILDKITE_BRANCH}"
      env:
        TRIGGERED_BUILDKITE_TAG: "${BUILDKITE_TAG}"
@@ -89,21 +89,13 @@ BETA_CHANNEL_LATEST_TAG=${beta_tag:+v$beta_tag}
STABLE_CHANNEL_LATEST_TAG=${stable_tag:+v$stable_tag}


if [[ -n $CI_BASE_BRANCH ]]; then
  BRANCH="$CI_BASE_BRANCH"
elif [[ -n $CI_BRANCH ]]; then
  BRANCH="$CI_BRANCH"
fi

if [[ -z "$CHANNEL" ]]; then
  if [[ $BRANCH = "$STABLE_CHANNEL" ]]; then
if [[ $CI_BRANCH = "$STABLE_CHANNEL" ]]; then
  CHANNEL=stable
  elif [[ $BRANCH = "$EDGE_CHANNEL" ]]; then
elif [[ $CI_BRANCH = "$EDGE_CHANNEL" ]]; then
  CHANNEL=edge
  elif [[ $BRANCH = "$BETA_CHANNEL" ]]; then
elif [[ $CI_BRANCH = "$BETA_CHANNEL" ]]; then
  CHANNEL=beta
fi
fi

echo EDGE_CHANNEL="$EDGE_CHANNEL"
echo BETA_CHANNEL="$BETA_CHANNEL"

@@ -7,14 +7,14 @@ source ci/_
commit_range="$(git merge-base HEAD origin/master)..HEAD"
parsed_update_args="$(
  git log "$commit_range" --author "dependabot-preview" --oneline -n1 |
    grep -o '[Bb]ump.*$' |
    sed -r 's/[Bb]ump ([^ ]+) from ([^ ]+) to ([^ ]+)/-p \1:\2 --precise \3/'
    grep -o 'Bump.*$' |
    sed -r 's/Bump ([^ ]+) from ([^ ]+) to ([^ ]+)/-p \1:\2 --precise \3/'
)"
# relaxed_parsed_update_args is temporal measure...
relaxed_parsed_update_args="$(
  git log "$commit_range" --author "dependabot-preview" --oneline -n1 |
    grep -o '[Bb]ump.*$' |
    sed -r 's/[Bb]ump ([^ ]+) from [^ ]+ to ([^ ]+)/-p \1 --precise \2/'
    grep -o 'Bump.*$' |
    sed -r 's/Bump ([^ ]+) from [^ ]+ to ([^ ]+)/-p \1 --precise \2/'
)"
package=$(echo "$parsed_update_args" | awk '{print $2}' | grep -o "^[^:]*")
if [[ -n $parsed_update_args ]]; then

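For illustration, the grep/sed pair above turns a dependabot commit subject into cargo-update arguments; the crate name and versions below are made-up examples for a quick local check:

echo "Bump serde from 1.0.112 to 1.0.118" |
  grep -o '[Bb]ump.*$' |
  sed -r 's/[Bb]ump ([^ ]+) from ([^ ]+) to ([^ ]+)/-p \1:\2 --precise \3/'
# prints: -p serde:1.0.112 --precise 1.0.118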
@@ -60,12 +60,6 @@ if [[ -z "$SOLANA_DOCKER_RUN_NOSETUID" ]]; then
  ARGS+=(--user "$(id -u):$(id -g)")
fi

if [[ -n $SOLANA_ALLOCATE_TTY ]]; then
  # Colored output, progress bar and Ctrl-C:
  # https://stackoverflow.com/a/41099052/10242004
  ARGS+=(--interactive --tty)
fi

# Environment variables to propagate into the container
ARGS+=(
  --env BUILDKITE
@@ -73,7 +67,6 @@ ARGS+=(
  --env BUILDKITE_JOB_ID
  --env CI
  --env CI_BRANCH
  --env CI_BASE_BRANCH
  --env CI_TAG
  --env CI_BUILD_ID
  --env CI_COMMIT

@@ -1,10 +1,9 @@
FROM solanalabs/rust:1.46.0
FROM solanalabs/rust:1.43.0
ARG date

RUN set -x \
 && rustup install nightly-$date \
 && rustup component add clippy --toolchain=nightly-$date \
 && rustup component add rustfmt --toolchain=nightly-$date \
 && rustup show \
 && rustc --version \
 && cargo --version \

@@ -2,27 +2,23 @@ Docker image containing rust nightly and some preinstalled crates used in CI.

This image may be manually updated by running `CI=true ./build.sh` if you are a member
of the [Solana Labs](https://hub.docker.com/u/solanalabs/) Docker Hub
organization.
organization, but it is also automatically updated periodically by
[this automation](https://buildkite.com/solana-labs/solana-ci-docker-rust-nightly).

## Moving to a newer nightly

NOTE: Follow instructions in docker-rust/README.md before this when updating the stable
rust version as well.

We pin the version of nightly (see the `ARG nightly=xyz` line in `Dockerfile`)
to avoid the build breaking at unexpected times, as occasionally nightly will
introduce breaking changes.

To update the pinned version:
1. Edit `Dockerfile` to match the desired stable rust version to base on if needed.
1. Run `ci/docker-rust-nightly/build.sh` to rebuild the nightly image locally,
   or potentially `ci/docker-rust-nightly/build.sh YYYY-MM-DD` if there's a
   specific YYYY-MM-DD that is desired (default is today's build).
   Check https://rust-lang.github.io/rustup-components-history/ for build
   status
1. Update `ci/rust-version.sh` to reflect the new nightly `YYYY-MM-DD`
1. Run `SOLANA_ALLOCATE_TTY=1 SOLANA_DOCKER_RUN_NOSETUID=1 ci/docker-run.sh --nopull solanalabs/rust-nightly:YYYY-MM-DD ci/test-checks.sh`
   and `SOLANA_ALLOCATE_TTY=1 SOLANA_DOCKER_RUN_NOSETUID=1 ci/docker-run.sh --nopull solanalabs/rust-nightly:YYYY-MM-DD ci/test-coverage.sh [args]...`
1. Update `ci/rust-version.sh` to reflect the new nightly `YYY-MM-DD`
1. Run `SOLANA_DOCKER_RUN_NOSETUID=1 ci/docker-run.sh --nopull solanalabs/rust-nightly:YYYY-MM-DD ci/test-coverage.sh`
   to confirm the new nightly image builds.  Fix any issues as needed
1. Run `docker login` to enable pushing images to Docker Hub, if you're authorized.
1. Run `CI=true ci/docker-rust-nightly/build.sh YYYY-MM-DD` to push the new nightly image to dockerhub.com.

@@ -1,6 +1,6 @@
# Note: when the rust version is changed also modify
# ci/rust-version.sh to pick up the new image tag
FROM rust:1.46.0
FROM rust:1.43.0

# Add Google Protocol Buffers for Libra's metrics library.
ENV PROTOC_VERSION 3.8.0

@@ -1,11 +1,7 @@
Docker image containing rust and some preinstalled packages used in CI.

NOTE: Recreate rust-nightly docker image after this when updating the stable rust
version! Both of docker images must be updated in tandem.

This image manually maintained:
1. Edit `Dockerfile` to match the desired rust version
1. Run `docker login` to enable pushing images to Docker Hub, if you're authorized.
1. Run `./build.sh` to publish the new image, if you are a member of the [Solana
2. Run `./build.sh` to publish the new image, if you are a member of the [Solana
   Labs](https://hub.docker.com/u/solanalabs/) Docker Hub organization.

@@ -8,11 +8,10 @@ if [[ -n $CI ]]; then
  export CI=1
  if [[ -n $TRAVIS ]]; then
    export CI_BRANCH=$TRAVIS_BRANCH
    export CI_BASE_BRANCH=$TRAVIS_BRANCH
    export CI_BUILD_ID=$TRAVIS_BUILD_ID
    export CI_COMMIT=$TRAVIS_COMMIT
    export CI_JOB_ID=$TRAVIS_JOB_ID
    if [[ $TRAVIS_PULL_REQUEST != false ]]; then
    if $TRAVIS_PULL_REQUEST; then
      export CI_PULL_REQUEST=true
    else
      export CI_PULL_REQUEST=
@@ -29,10 +28,8 @@ if [[ -n $CI ]]; then
    # to how solana-ci-gate is used to trigger PR builds rather than using the
    # standard Buildkite PR trigger.
    if [[ $CI_BRANCH =~ pull/* ]]; then
      export CI_BASE_BRANCH=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
      export CI_PULL_REQUEST=true
    else
      export CI_BASE_BRANCH=$BUILDKITE_BRANCH
      export CI_PULL_REQUEST=
    fi
    export CI_OS_NAME=linux

ci/maybe-trigger-tests.sh (new executable file, 21 lines)
@@ -0,0 +1,21 @@
#!/usr/bin/env bash
set -e
cd "$(dirname "$0")/.."

annotate() {
  ${BUILDKITE:-false} && {
    buildkite-agent annotate "$@"
  }
}

# Skip if only the docs have been modified
ci/affects-files.sh \
  \!^docs/ \
|| {
  annotate --style info \
    "Skipping all further tests as only docs/ files were modified"
  exit 0
}

annotate --style info "Triggering tests"
buildkite-agent pipeline upload ci/buildkite-tests.yml
@@ -26,9 +26,6 @@ declare print_free_tree=(
  ':runtime/src/**.rs'
  ':sdk/bpf/rust/rust-utils/**.rs'
  ':sdk/**.rs'
  ':^sdk/cargo-build-bpf/**.rs'
  ':^sdk/program/src/program_option.rs'
  ':^sdk/program/src/program_stubs.rs'
  ':programs/**.rs'
  ':^**bin**.rs'
  ':^**bench**.rs'

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python2.7
#
# This script figures the order in which workspace crates must be published to
# crates.io.  Along the way it also ensures there are no circular dependencies
@@ -45,27 +45,21 @@ def get_packages():
    sorted_dependency_graph = []
    max_iterations = pow(len(dependency_graph),2)
    while dependency_graph:
        deleted_packages = []
        if max_iterations == 0:
            # One day be more helpful and find the actual cycle for the user...
            sys.exit('Error: Circular dependency suspected between these packages: \n {}\n'.format('\n '.join(dependency_graph.keys())))

        max_iterations -= 1

        for package, dependencies in dependency_graph.items():
            if package in deleted_packages:
                continue
            for dependency in dependencies:
                if dependency in dependency_graph:
                    break
            else:
                deleted_packages.append(package)
                del dependency_graph[package]
                sorted_dependency_graph.append((package, manifest_path[package]))

        dependency_graph = {p: d for p, d in dependency_graph.items() if not p in deleted_packages }


    return sorted_dependency_graph

for package, manifest in get_packages():
    print(os.path.relpath(manifest))
    print os.path.relpath(manifest)

@@ -4,8 +4,6 @@ cd "$(dirname "$0")/.."
source ci/semver_bash/semver.sh
source ci/rust-version.sh stable

cargo="$(readlink -f ./cargo)"

# shellcheck disable=SC2086
is_crate_version_uploaded() {
  name=$1
@@ -40,7 +38,7 @@ for Cargo_toml in $Cargo_tomls; do
  crate_name=$(grep -m 1 '^name = ' "$Cargo_toml" | cut -f 3 -d ' ' | tr -d \")

  if grep -q "^publish = false" "$Cargo_toml"; then
    echo "$crate_name is marked as unpublishable"
    echo "$crate_name is is marked as unpublishable"
    continue
  fi

@@ -68,11 +66,11 @@ for Cargo_toml in $Cargo_tomls; do
      (
        set -x
        rm -rf crate-test
        "$cargo" stable init crate-test
        cargo +"$rust_stable" init crate-test
        cd crate-test/
        echo "${crate_name} = \"${expectedCrateVersion}\"" >> Cargo.toml
        echo "[workspace]" >> Cargo.toml
        "$cargo" stable check
        cargo +"$rust_stable" check
      ) && really_uploaded=1
      if ((really_uploaded)); then
        break;

ci/publish-docs.sh (Executable file, 32 lines)
@@ -0,0 +1,32 @@
#!/usr/bin/env bash
set -e

cd "$(dirname "$0")/.."

echo --- build docs
(
  set -x
  . ci/rust-version.sh stable
  ci/docker-run.sh "$rust_stable_docker_image" docs/build.sh
)

echo --- update gitbook-cage
if [[ -n $CI_BRANCH ]]; then
  (
    # make a local commit for the svgs and generated/updated markdown
    set -x
    git add -f docs/src
    if ! git diff-index --quiet HEAD; then
      git config user.email maintainers@solana.com
      git config user.name "$(basename "$0")"
      git commit -m "gitbook-cage update $(date -Is)"
      git push -f git@github.com:solana-labs/solana-gitbook-cage.git HEAD:refs/heads/"$CI_BRANCH"
      # pop off the local commit
      git reset --hard HEAD~
    fi
  )
else
  echo CI_BRANCH not set
fi

exit 0
@@ -45,16 +45,7 @@ linux)
 | 
			
		||||
  TARGET=x86_64-unknown-linux-gnu
 | 
			
		||||
  ;;
 | 
			
		||||
windows)
 | 
			
		||||
  TARGET=x86_64-pc-windows-msvc
 | 
			
		||||
  # Enable symlinks used by some build.rs files
 | 
			
		||||
  # source: https://stackoverflow.com/a/52097145/10242004
 | 
			
		||||
  (
 | 
			
		||||
    set -x
 | 
			
		||||
    git --version
 | 
			
		||||
    git config core.symlinks true
 | 
			
		||||
    find . -type l -delete
 | 
			
		||||
    git reset --hard
 | 
			
		||||
  )
 | 
			
		||||
  TARGET=x86_64-pc-windows-gnu
 | 
			
		||||
  ;;
 | 
			
		||||
*)
 | 
			
		||||
  echo CI_OS_NAME unset
 | 
			
		||||
@@ -62,14 +53,11 @@ windows)
 | 
			
		||||
  ;;
 | 
			
		||||
esac
 | 
			
		||||
 | 
			
		||||
RELEASE_BASENAME="${RELEASE_BASENAME:=solana-release}"
 | 
			
		||||
TARBALL_BASENAME="${TARBALL_BASENAME:="$RELEASE_BASENAME"}"
 | 
			
		||||
 | 
			
		||||
echo --- Creating release tarball
 | 
			
		||||
(
 | 
			
		||||
  set -x
 | 
			
		||||
  rm -rf "${RELEASE_BASENAME:?}"/
 | 
			
		||||
  mkdir "${RELEASE_BASENAME}"/
 | 
			
		||||
  rm -rf solana-release/
 | 
			
		||||
  mkdir solana-release/
 | 
			
		||||
 | 
			
		||||
  COMMIT="$(git rev-parse HEAD)"
 | 
			
		||||
 | 
			
		||||
@@ -77,34 +65,36 @@ echo --- Creating release tarball
 | 
			
		||||
    echo "channel: $CHANNEL_OR_TAG"
 | 
			
		||||
    echo "commit: $COMMIT"
 | 
			
		||||
    echo "target: $TARGET"
 | 
			
		||||
  ) > "${RELEASE_BASENAME}"/version.yml
 | 
			
		||||
  ) > solana-release/version.yml
 | 
			
		||||
 | 
			
		||||
  # Make CHANNEL available to include in the software version information
 | 
			
		||||
  export CHANNEL
 | 
			
		||||
 | 
			
		||||
  source ci/rust-version.sh stable
 | 
			
		||||
  scripts/cargo-install-all.sh +"$rust_stable" "${RELEASE_BASENAME}"
 | 
			
		||||
  scripts/cargo-install-all.sh +"$rust_stable" solana-release
 | 
			
		||||
 | 
			
		||||
  tar cvf "${TARBALL_BASENAME}"-$TARGET.tar "${RELEASE_BASENAME}"
 | 
			
		||||
  bzip2 "${TARBALL_BASENAME}"-$TARGET.tar
 | 
			
		||||
  cp "${RELEASE_BASENAME}"/bin/solana-install-init solana-install-init-$TARGET
 | 
			
		||||
  cp "${RELEASE_BASENAME}"/version.yml "${TARBALL_BASENAME}"-$TARGET.yml
 | 
			
		||||
  tar cvf solana-release-$TARGET.tar solana-release
 | 
			
		||||
  bzip2 solana-release-$TARGET.tar
 | 
			
		||||
  cp solana-release/bin/solana-install-init solana-install-init-$TARGET
 | 
			
		||||
  cp solana-release/version.yml solana-release-$TARGET.yml
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
# Maybe tarballs are platform agnostic, only publish them from the Linux build
 | 
			
		||||
# Metrics tarball is platform agnostic, only publish it from Linux
 | 
			
		||||
MAYBE_TARBALLS=
 | 
			
		||||
if [[ "$CI_OS_NAME" = linux ]]; then
 | 
			
		||||
  metrics/create-metrics-tarball.sh
 | 
			
		||||
  (
 | 
			
		||||
    set -x
 | 
			
		||||
    sdk/bpf/scripts/package.sh
 | 
			
		||||
    [[ -f bpf-sdk.tar.bz2 ]]
 | 
			
		||||
 | 
			
		||||
  )
 | 
			
		||||
  MAYBE_TARBALLS="bpf-sdk.tar.bz2"
 | 
			
		||||
  MAYBE_TARBALLS="bpf-sdk.tar.bz2 solana-metrics.tar.bz2"
 | 
			
		||||
fi
 | 
			
		||||
 | 
			
		||||
source ci/upload-ci-artifact.sh
 | 
			
		||||
 | 
			
		||||
for file in "${TARBALL_BASENAME}"-$TARGET.tar.bz2 "${TARBALL_BASENAME}"-$TARGET.yml solana-install-init-"$TARGET"* $MAYBE_TARBALLS; do
 | 
			
		||||
for file in solana-release-$TARGET.tar.bz2 solana-release-$TARGET.yml solana-install-init-"$TARGET"* $MAYBE_TARBALLS; do
 | 
			
		||||
  if [[ -n $DO_NOT_PUBLISH_TAR ]]; then
 | 
			
		||||
    upload-ci-artifact "$file"
 | 
			
		||||
    echo "Skipped $file due to DO_NOT_PUBLISH_TAR"
 | 
			
		||||
@@ -124,7 +114,7 @@ for file in "${TARBALL_BASENAME}"-$TARGET.tar.bz2 "${TARBALL_BASENAME}"-$TARGET.
 | 
			
		||||
        /usr/bin/s3cmd --acl-public put /solana/"$file" s3://release.solana.com/"$CHANNEL_OR_TAG"/"$file"
 | 
			
		||||
 | 
			
		||||
      echo Published to:
 | 
			
		||||
      $DRYRUN ci/format-url.sh https://release.solana.com/"$CHANNEL_OR_TAG"/"$file"
 | 
			
		||||
      $DRYRUN ci/format-url.sh http://release.solana.com/"$CHANNEL_OR_TAG"/"$file"
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    if [[ -n $TAG ]]; then
 | 
			
		||||
@@ -147,30 +137,4 @@ for file in "${TARBALL_BASENAME}"-$TARGET.tar.bz2 "${TARBALL_BASENAME}"-$TARGET.
 | 
			
		||||
  fi
 | 
			
		||||
done
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Create install wrapper for release.solana.com
 | 
			
		||||
if [[ -n $BUILDKITE ]]; then
 | 
			
		||||
  cat > release.solana.com-install <<EOF
 | 
			
		||||
SOLANA_RELEASE=$CHANNEL_OR_TAG
 | 
			
		||||
SOLANA_INSTALL_INIT_ARGS=$CHANNEL_OR_TAG
 | 
			
		||||
SOLANA_DOWNLOAD_ROOT=http://release.solana.com
 | 
			
		||||
EOF
 | 
			
		||||
  cat install/solana-install-init.sh >> release.solana.com-install
 | 
			
		||||
 | 
			
		||||
  echo --- AWS S3 Store: "install"
 | 
			
		||||
  (
 | 
			
		||||
    set -x
 | 
			
		||||
    $DRYRUN docker run \
 | 
			
		||||
      --rm \
 | 
			
		||||
      --env AWS_ACCESS_KEY_ID \
 | 
			
		||||
      --env AWS_SECRET_ACCESS_KEY \
 | 
			
		||||
      --volume "$PWD:/solana" \
 | 
			
		||||
      eremite/aws-cli:2018.12.18 \
 | 
			
		||||
      /usr/bin/s3cmd --acl-public put /solana/release.solana.com-install s3://release.solana.com/"$CHANNEL_OR_TAG"/install
 | 
			
		||||
 | 
			
		||||
    echo Published to:
 | 
			
		||||
    $DRYRUN ci/format-url.sh https://release.solana.com/"$CHANNEL_OR_TAG"/install
 | 
			
		||||
  )
 | 
			
		||||
fi
 | 
			
		||||
 | 
			
		||||
echo --- ok
 | 
			
		||||
 
 | 
			
		||||
@@ -7,7 +7,7 @@ source multinode-demo/common.sh

rm -rf config/run/init-completed config/ledger config/snapshot-ledger

timeout 120 ./run.sh &
timeout 15 ./run.sh &
pid=$!

attempts=20
@@ -19,16 +19,13 @@ while [[ ! -f config/run/init-completed ]]; do
  fi
done

snapshot_slot=1

# wait a bit longer than snapshot_slot
while [[ $($solana_cli --url http://localhost:8899 slot --commitment recent) -le $((snapshot_slot + 1)) ]]; do
while [[ $($solana_cli slot --commitment recent) -eq 0 ]]; do
  sleep 1
done
curl -X POST -H 'Content-Type: application/json' -d '{"jsonrpc":"2.0","id":1, "method":"validatorExit"}' http://localhost:8899

wait $pid

$solana_ledger_tool create-snapshot --ledger config/ledger "$snapshot_slot" config/snapshot-ledger
$solana_ledger_tool create-snapshot --ledger config/ledger 1 config/snapshot-ledger
cp config/ledger/genesis.tar.bz2 config/snapshot-ledger
$solana_ledger_tool verify --ledger config/snapshot-ledger

@@ -18,13 +18,13 @@
if [[ -n $RUST_STABLE_VERSION ]]; then
  stable_version="$RUST_STABLE_VERSION"
else
  stable_version=1.46.0
  stable_version=1.43.0
fi

if [[ -n $RUST_NIGHTLY_VERSION ]]; then
  nightly_version="$RUST_NIGHTLY_VERSION"
else
  nightly_version=2020-08-17
  nightly_version=2020-04-23
fi


@@ -38,8 +38,7 @@ export rust_nightly_docker_image=solanalabs/rust-nightly:"$nightly_version"

  rustup_install() {
    declare toolchain=$1
    if ! cargo +"$toolchain" -V > /dev/null; then
      echo "$0: Missing toolchain? Installing...: $toolchain" >&2
    if ! cargo +"$toolchain" -V; then
      rustup install "$toolchain"
      cargo +"$toolchain" -V
    fi
@@ -59,7 +58,7 @@ export rust_nightly_docker_image=solanalabs/rust-nightly:"$nightly_version"
     rustup_install "$rust_nightly"
    ;;
  *)
    echo "$0: Note: ignoring unknown argument: $1" >&2
    echo "Note: ignoring unknown argument: $1"
    ;;
  esac
)

@@ -76,7 +76,7 @@ RestartForceExitStatus=SIGPIPE
TimeoutStartSec=10
TimeoutStopSec=0
KillMode=process
LimitNOFILE=500000
LimitNOFILE=65536

[Install]
WantedBy=multi-user.target

@@ -8,5 +8,5 @@ source "$HERE"/utils.sh
ensure_env || exit 1

# Allow more files to be opened by a user
echo "* - nofile 500000" > /etc/security/limits.d/90-solana-nofiles.conf
sed -i 's/^\(# End of file\)/* soft nofile 65535\n\n\1/' /etc/security/limits.conf

@@ -2,12 +2,30 @@
 | 
			
		||||
set -e
 | 
			
		||||
cd "$(dirname "$0")/.."
 | 
			
		||||
 | 
			
		||||
annotate() {
 | 
			
		||||
  ${BUILDKITE:-false} && {
 | 
			
		||||
    buildkite-agent annotate "$@"
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
ci/affects-files.sh \
 | 
			
		||||
  .rs$ \
 | 
			
		||||
  Cargo.lock$ \
 | 
			
		||||
  Cargo.toml$ \
 | 
			
		||||
  ^ci/rust-version.sh \
 | 
			
		||||
  ^ci/test-bench.sh \
 | 
			
		||||
|| {
 | 
			
		||||
  annotate --style info --context test-bench \
 | 
			
		||||
    "Bench skipped as no .rs files were modified"
 | 
			
		||||
  exit 0
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
source ci/_
 | 
			
		||||
source ci/upload-ci-artifact.sh
 | 
			
		||||
 | 
			
		||||
eval "$(ci/channel-info.sh)"
 | 
			
		||||
 | 
			
		||||
cargo="$(readlink -f "./cargo")"
 | 
			
		||||
source ci/rust-version.sh all
 | 
			
		||||
 | 
			
		||||
set -o pipefail
 | 
			
		||||
export RUST_BACKTRACE=1
 | 
			
		||||
@@ -28,35 +46,35 @@ test -d target/debug/bpf && find target/debug/bpf -name '*.d' -delete
 | 
			
		||||
test -d target/release/bpf && find target/release/bpf -name '*.d' -delete
 | 
			
		||||
 | 
			
		||||
# Ensure all dependencies are built
 | 
			
		||||
_ "$cargo" nightly build --release
 | 
			
		||||
_ cargo +$rust_nightly build --release
 | 
			
		||||
 | 
			
		||||
# Remove "BENCH_FILE", if it exists so that the following commands can append
 | 
			
		||||
rm -f "$BENCH_FILE"
 | 
			
		||||
 | 
			
		||||
# Run sdk benches
 | 
			
		||||
_ "$cargo" nightly bench --manifest-path sdk/Cargo.toml ${V:+--verbose} \
 | 
			
		||||
_ cargo +$rust_nightly bench --manifest-path sdk/Cargo.toml ${V:+--verbose} \
 | 
			
		||||
  -- -Z unstable-options --format=json | tee -a "$BENCH_FILE"
 | 
			
		||||
 | 
			
		||||
# Run runtime benches
 | 
			
		||||
_ "$cargo" nightly bench --manifest-path runtime/Cargo.toml ${V:+--verbose} \
 | 
			
		||||
_ cargo +$rust_nightly bench --manifest-path runtime/Cargo.toml ${V:+--verbose} \
 | 
			
		||||
  -- -Z unstable-options --format=json | tee -a "$BENCH_FILE"
 | 
			
		||||
 | 
			
		||||
# Run core benches
 | 
			
		||||
_ "$cargo" nightly bench --manifest-path core/Cargo.toml ${V:+--verbose} \
 | 
			
		||||
_ cargo +$rust_nightly bench --manifest-path core/Cargo.toml ${V:+--verbose} \
 | 
			
		||||
  -- -Z unstable-options --format=json | tee -a "$BENCH_FILE"
 | 
			
		||||
 | 
			
		||||
# Run bpf benches
 | 
			
		||||
_ "$cargo" nightly bench --manifest-path programs/bpf/Cargo.toml ${V:+--verbose} --features=bpf_c \
 | 
			
		||||
_ cargo +$rust_nightly bench --manifest-path programs/bpf/Cargo.toml ${V:+--verbose} --features=bpf_c \
 | 
			
		||||
  -- -Z unstable-options --format=json --nocapture | tee -a "$BENCH_FILE"
 | 
			
		||||
 | 
			
		||||
# Run banking/accounts bench. Doesn't require nightly, but use since it is already built.
 | 
			
		||||
_ "$cargo" nightly run --release --manifest-path banking-bench/Cargo.toml ${V:+--verbose} | tee -a "$BENCH_FILE"
 | 
			
		||||
_ "$cargo" nightly run --release --manifest-path accounts-bench/Cargo.toml ${V:+--verbose} -- --num_accounts 10000 --num_slots 4 | tee -a "$BENCH_FILE"
 | 
			
		||||
_ cargo +$rust_nightly run --release --manifest-path banking-bench/Cargo.toml ${V:+--verbose} | tee -a "$BENCH_FILE"
 | 
			
		||||
_ cargo +$rust_nightly run --release --manifest-path accounts-bench/Cargo.toml ${V:+--verbose} -- --num_accounts 10000 --num_slots 4 | tee -a "$BENCH_FILE"
 | 
			
		||||
 | 
			
		||||
# `solana-upload-perf` disabled as it can take over 30 minutes to complete for some
 | 
			
		||||
# reason
 | 
			
		||||
exit 0
 | 
			
		||||
_ "$cargo" nightly run --release --package solana-upload-perf \
 | 
			
		||||
_ cargo +$rust_nightly run --release --package solana-upload-perf \
 | 
			
		||||
  -- "$BENCH_FILE" "$TARGET_BRANCH" "$UPLOAD_METRICS" | tee "$BENCH_ARTIFACT"
 | 
			
		||||
 | 
			
		||||
upload-ci-artifact "$BENCH_FILE"
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,4 @@
 | 
			
		||||
#!/usr/bin/env bash
 | 
			
		||||
 | 
			
		||||
set -e
 | 
			
		||||
 | 
			
		||||
cd "$(dirname "$0")/.."
 | 
			
		||||
@@ -7,94 +6,45 @@ cd "$(dirname "$0")/.."
 | 
			
		||||
source ci/_
 | 
			
		||||
source ci/rust-version.sh stable
 | 
			
		||||
source ci/rust-version.sh nightly
 | 
			
		||||
eval "$(ci/channel-info.sh)"
 | 
			
		||||
cargo="$(readlink -f "./cargo")"
 | 
			
		||||
 | 
			
		||||
scripts/increment-cargo-version.sh check
 | 
			
		||||
 | 
			
		||||
echo --- build environment
 | 
			
		||||
(
 | 
			
		||||
  set -x
 | 
			
		||||
 | 
			
		||||
  rustup run "$rust_stable" rustc --version --verbose
 | 
			
		||||
  rustup run "$rust_nightly" rustc --version --verbose
 | 
			
		||||
 | 
			
		||||
  "$cargo" stable --version --verbose
 | 
			
		||||
  "$cargo" nightly --version --verbose
 | 
			
		||||
 | 
			
		||||
  "$cargo" stable clippy --version --verbose
 | 
			
		||||
  "$cargo" nightly clippy --version --verbose
 | 
			
		||||
 | 
			
		||||
  # audit is done only with stable
 | 
			
		||||
  "$cargo" stable audit --version
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
export RUST_BACKTRACE=1
 | 
			
		||||
export RUSTFLAGS="-D warnings -A incomplete_features"
 | 
			
		||||
export RUSTFLAGS="-D warnings"
 | 
			
		||||
 | 
			
		||||
# Only force up-to-date lock files on edge
 | 
			
		||||
if [[ $CI_BASE_BRANCH = "$EDGE_CHANNEL" ]]; then
 | 
			
		||||
  # Exclude --benches as it's not available in rust stable yet
 | 
			
		||||
  if _ scripts/cargo-for-all-lock-files.sh +"$rust_stable" check --locked --tests --bins --examples; then
 | 
			
		||||
# Look for failed mergify.io backports
 | 
			
		||||
_ git show HEAD --check --oneline
 | 
			
		||||
 | 
			
		||||
if _ scripts/cargo-for-all-lock-files.sh +"$rust_nightly" check --locked --all-targets; then
 | 
			
		||||
  true
 | 
			
		||||
else
 | 
			
		||||
  check_status=$?
 | 
			
		||||
    echo "$0: Some Cargo.lock might be outdated; sync them (or just be a compilation error?)" >&2
 | 
			
		||||
    echo "$0: protip: $ ./scripts/cargo-for-all-lock-files.sh [--ignore-exit-code] ... \\" >&2
 | 
			
		||||
    echo "$0:   [tree (for outdated Cargo.lock sync)|check (for compilation error)|update -p foo --precise x.y.z (for your Cargo.toml update)] ..." >&2
 | 
			
		||||
  echo "Some Cargo.lock is outdated; please update them as well"
 | 
			
		||||
  echo "protip: you can use ./scripts/cargo-for-all-lock-files.sh update ..."
 | 
			
		||||
  exit "$check_status"
 | 
			
		||||
fi
 | 
			
		||||
 | 
			
		||||
  # Ensure nightly and --benches
 | 
			
		||||
  _ scripts/cargo-for-all-lock-files.sh +"$rust_nightly" check --locked --all-targets
 | 
			
		||||
else
 | 
			
		||||
  echo "Note: cargo-for-all-lock-files.sh skipped because $CI_BASE_BRANCH != $EDGE_CHANNEL"
 | 
			
		||||
fi
 | 
			
		||||
_ cargo +"$rust_stable" fmt --all -- --check
 | 
			
		||||
 | 
			
		||||
_ cargo +"$rust_stable" clippy --version
 | 
			
		||||
_ cargo +"$rust_stable" clippy --workspace -- --deny=warnings
 | 
			
		||||
 | 
			
		||||
_ cargo +"$rust_stable" audit --version
 | 
			
		||||
_ scripts/cargo-for-all-lock-files.sh +"$rust_stable" audit --ignore RUSTSEC-2020-0002 --ignore RUSTSEC-2020-0008
 | 
			
		||||
_ ci/nits.sh
 | 
			
		||||
_ ci/order-crates-for-publishing.py
 | 
			
		||||
_ "$cargo" stable fmt --all -- --check
 | 
			
		||||
 | 
			
		||||
# -Z... is needed because of clippy bug: https://github.com/rust-lang/rust-clippy/issues/4612
 | 
			
		||||
# run nightly clippy for `sdk/` as there's a moderate amount of nightly-only code there
 | 
			
		||||
_ "$cargo" nightly clippy \
 | 
			
		||||
  -Zunstable-options --workspace --all-targets \
 | 
			
		||||
  -- --deny=warnings --allow=clippy::stable_sort_primitive
 | 
			
		||||
 | 
			
		||||
cargo_audit_ignores=(
 | 
			
		||||
  # failure is officially deprecated/unmaintained
 | 
			
		||||
  #
 | 
			
		||||
  # Blocked on multiple upstream crates removing their `failure` dependency.
 | 
			
		||||
  --ignore RUSTSEC-2020-0036
 | 
			
		||||
 | 
			
		||||
  # `net2` crate has been deprecated; use `socket2` instead
 | 
			
		||||
  #
 | 
			
		||||
  # Blocked on https://github.com/paritytech/jsonrpc/issues/575
 | 
			
		||||
  --ignore RUSTSEC-2020-0016
 | 
			
		||||
 | 
			
		||||
  # stdweb is unmaintained
 | 
			
		||||
  #
 | 
			
		||||
  # Blocked on multiple upstream crates removing their `stdweb` dependency.
 | 
			
		||||
  --ignore RUSTSEC-2020-0056
 | 
			
		||||
 | 
			
		||||
  # Potential segfault in the time crate
 | 
			
		||||
  #
 | 
			
		||||
  # Blocked on multiple crates updating `time` to >= 0.2.23
 | 
			
		||||
  --ignore RUSTSEC-2020-0071
 | 
			
		||||
)
 | 
			
		||||
_ scripts/cargo-for-all-lock-files.sh +"$rust_stable" audit "${cargo_audit_ignores[@]}"
 | 
			
		||||
_ docs/build.sh
 | 
			
		||||
_ ci/check-ssh-keys.sh
 | 
			
		||||
 | 
			
		||||
{
 | 
			
		||||
  cd programs/bpf
 | 
			
		||||
  _ "$cargo" stable audit
 | 
			
		||||
  _ cargo +"$rust_stable" audit
 | 
			
		||||
  for project in rust/*/ ; do
 | 
			
		||||
    echo "+++ do_bpf_checks $project"
 | 
			
		||||
    (
 | 
			
		||||
      cd "$project"
 | 
			
		||||
      _ "$cargo" stable fmt -- --check
 | 
			
		||||
      _ "$cargo" nightly test
 | 
			
		||||
      _ "$cargo" nightly clippy -- --deny=warnings \
 | 
			
		||||
        --allow=clippy::missing_safety_doc \
 | 
			
		||||
        --allow=clippy::stable_sort_primitive
 | 
			
		||||
      _ cargo +"$rust_stable" fmt -- --check
 | 
			
		||||
      _ cargo +"$rust_nightly" test
 | 
			
		||||
      _ cargo +"$rust_nightly" clippy --version
 | 
			
		||||
      _ cargo +"$rust_nightly" clippy -- --deny=warnings --allow=clippy::missing_safety_doc
 | 
			
		||||
    )
 | 
			
		||||
  done
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -8,14 +8,23 @@ annotate() {
  }
}

ci/affects-files.sh \
  .rs$ \
  Cargo.lock$ \
  Cargo.toml$ \
  ^ci/rust-version.sh \
  ^ci/test-coverage.sh \
  ^scripts/coverage.sh \
|| {
  annotate --style info --context test-coverage \
    "Coverage skipped as no .rs files were modified"
  exit 0
}

source ci/upload-ci-artifact.sh
source scripts/ulimit-n.sh

scripts/coverage.sh "$@"

if [[ -z $CI ]]; then
  exit
fi
scripts/coverage.sh

report=coverage-"${CI_COMMIT:0:9}".tar.gz
mv target/cov/report.tar.gz "$report"

ci/test-move.sh (Symbolic link, 1 line)
@@ -0,0 +1 @@
test-stable.sh
@@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -e

cd "$(dirname "$0")/.."

source ci/_

(
  echo --- git diff --check
  set -x
  # Look for failed mergify.io backports by searching leftover conflict markers
  # Also check for any trailing whitespaces!
  git fetch origin "$CI_BASE_BRANCH"
  git diff "$(git merge-base HEAD "origin/$CI_BASE_BRANCH")..HEAD" --check --oneline
)

echo

_ ci/nits.sh
_ ci/check-ssh-keys.sh

echo --- ok
@@ -2,8 +2,6 @@
 | 
			
		||||
set -e
 | 
			
		||||
cd "$(dirname "$0")/.."
 | 
			
		||||
 | 
			
		||||
cargo="$(readlink -f "./cargo")"
 | 
			
		||||
 | 
			
		||||
source ci/_
 | 
			
		||||
 | 
			
		||||
annotate() {
 | 
			
		||||
@@ -15,6 +13,15 @@ annotate() {
 | 
			
		||||
# Run the appropriate test based on entrypoint
 | 
			
		||||
testName=$(basename "$0" .sh)
 | 
			
		||||
 | 
			
		||||
# Skip if only the docs have been modified
 | 
			
		||||
ci/affects-files.sh \
 | 
			
		||||
  \!^docs/ \
 | 
			
		||||
|| {
 | 
			
		||||
  annotate --style info \
 | 
			
		||||
    "Skipped $testName as only docs/ files were modified"
 | 
			
		||||
  exit 0
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
source ci/rust-version.sh stable
 | 
			
		||||
 | 
			
		||||
export RUST_BACKTRACE=1
 | 
			
		||||
@@ -32,22 +39,38 @@ test -d target/release/bpf && find target/release/bpf -name '*.d' -delete
 | 
			
		||||
rm -rf target/xargo # Issue #3105
 | 
			
		||||
 | 
			
		||||
# Limit compiler jobs to reduce memory usage
 | 
			
		||||
# on machines with 2gb/thread of memory
 | 
			
		||||
# on machines with 1gb/thread of memory
 | 
			
		||||
NPROC=$(nproc)
 | 
			
		||||
NPROC=$((NPROC>14 ? 14 : NPROC))
 | 
			
		||||
NPROC=$((NPROC>16 ? 16 : NPROC))
 | 
			
		||||
 | 
			
		||||
echo "Executing $testName"
 | 
			
		||||
case $testName in
 | 
			
		||||
test-stable)
 | 
			
		||||
  _ "$cargo" stable test --jobs "$NPROC" --all --exclude solana-local-cluster ${V:+--verbose} -- --nocapture
 | 
			
		||||
  _ cargo +"$rust_stable" test --jobs "$NPROC" --all --exclude solana-local-cluster ${V:+--verbose} -- --nocapture
 | 
			
		||||
  _ cargo +"$rust_stable" test --manifest-path bench-tps/Cargo.toml --features=move ${V:+--verbose} test_bench_tps_local_cluster_move -- --nocapture
 | 
			
		||||
  ;;
 | 
			
		||||
test-stable-perf)
 | 
			
		||||
  # BPF solana-sdk legacy compile test
 | 
			
		||||
  ./cargo-build-bpf --manifest-path sdk/Cargo.toml
 | 
			
		||||
  ci/affects-files.sh \
 | 
			
		||||
    .rs$ \
 | 
			
		||||
    Cargo.lock$ \
 | 
			
		||||
    Cargo.toml$ \
 | 
			
		||||
    ^ci/rust-version.sh \
 | 
			
		||||
    ^ci/test-stable-perf.sh \
 | 
			
		||||
    ^ci/test-stable.sh \
 | 
			
		||||
    ^ci/test-local-cluster.sh \
 | 
			
		||||
    ^core/build.rs \
 | 
			
		||||
    ^fetch-perf-libs.sh \
 | 
			
		||||
    ^programs/ \
 | 
			
		||||
    ^sdk/ \
 | 
			
		||||
  || {
 | 
			
		||||
    annotate --style info \
 | 
			
		||||
      "Skipped $testName as no relevant files were modified"
 | 
			
		||||
    exit 0
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  # BPF program tests
 | 
			
		||||
  _ make -C programs/bpf/c tests
 | 
			
		||||
  _ "$cargo" stable test \
 | 
			
		||||
  _ cargo +"$rust_stable" test \
 | 
			
		||||
    --manifest-path programs/bpf/Cargo.toml \
 | 
			
		||||
    --no-default-features --features=bpf_c,bpf_rust -- --nocapture
 | 
			
		||||
 | 
			
		||||
@@ -67,13 +90,33 @@ test-stable-perf)
 | 
			
		||||
    export SOLANA_CUDA=1
 | 
			
		||||
  fi
 | 
			
		||||
 | 
			
		||||
  _ "$cargo" stable build --bins ${V:+--verbose}
 | 
			
		||||
  _ "$cargo" stable test --package solana-perf --package solana-ledger --package solana-core --lib ${V:+--verbose} -- --nocapture
 | 
			
		||||
  _ "$cargo" stable run --manifest-path poh-bench/Cargo.toml ${V:+--verbose} -- --hashes-per-tick 10
 | 
			
		||||
  _ cargo +"$rust_stable" build --bins ${V:+--verbose}
 | 
			
		||||
  _ cargo +"$rust_stable" test --package solana-perf --package solana-ledger --package solana-core --lib ${V:+--verbose} -- --nocapture
 | 
			
		||||
  ;;
 | 
			
		||||
test-move)
 | 
			
		||||
  ci/affects-files.sh \
 | 
			
		||||
    Cargo.lock$ \
 | 
			
		||||
    Cargo.toml$ \
 | 
			
		||||
    ^ci/rust-version.sh \
 | 
			
		||||
    ^ci/test-stable.sh \
 | 
			
		||||
    ^ci/test-move.sh \
 | 
			
		||||
    ^programs/move_loader \
 | 
			
		||||
    ^programs/librapay \
 | 
			
		||||
    ^logger/ \
 | 
			
		||||
    ^runtime/ \
 | 
			
		||||
    ^sdk/ \
 | 
			
		||||
  || {
 | 
			
		||||
    annotate --style info \
 | 
			
		||||
      "Skipped $testName as no relevant files were modified"
 | 
			
		||||
    exit 0
 | 
			
		||||
  }
 | 
			
		||||
  _ cargo +"$rust_stable" test --manifest-path programs/move_loader/Cargo.toml ${V:+--verbose} -- --nocapture
 | 
			
		||||
  _ cargo +"$rust_stable" test --manifest-path programs/librapay/Cargo.toml ${V:+--verbose} -- --nocapture
 | 
			
		||||
  exit 0
 | 
			
		||||
  ;;
 | 
			
		||||
test-local-cluster)
 | 
			
		||||
  _ "$cargo" stable build --release --bins ${V:+--verbose}
 | 
			
		||||
  _ "$cargo" stable test --release --package solana-local-cluster ${V:+--verbose} -- --nocapture --test-threads=1
 | 
			
		||||
  _ cargo +"$rust_stable" build --release --bins ${V:+--verbose}
 | 
			
		||||
  _ cargo +"$rust_stable" test --release --package solana-local-cluster ${V:+--verbose} -- --nocapture --test-threads=1
 | 
			
		||||
  exit 0
 | 
			
		||||
  ;;
 | 
			
		||||
*)
 | 
			
		||||
 
 | 
			
		||||
@@ -23,14 +23,10 @@ if [[ -z $CI_TAG ]]; then
  exit 1
fi

# Force CI_REPO_SLUG since sometimes
# BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG is not set correctly, causing the
# artifact upload to fail
CI_REPO_SLUG=solana-labs/solana
#if [[ -z $CI_REPO_SLUG ]]; then
#  echo Error: CI_REPO_SLUG not defined
#  exit 1
#fi
if [[ -z $CI_REPO_SLUG ]]; then
  echo Error: CI_REPO_SLUG not defined
  exit 1
fi

releaseId=$( \
  curl -s "https://api.github.com/repos/$CI_REPO_SLUG/releases/tags/$CI_TAG" \
@@ -42,7 +38,6 @@ echo "Github release id for $CI_TAG is $releaseId"
for file in "$@"; do
  echo "--- Uploading $file to tag $CI_TAG of $CI_REPO_SLUG"
  curl \
    --verbose \
    --data-binary @"$file" \
    -H "Authorization: token $GITHUB_TOKEN" \
    -H "Content-Type: application/octet-stream" \

@@ -1,8 +1,8 @@
[package]
name = "solana-clap-utils"
version = "1.4.18"
version = "1.2.0"
description = "Solana utilities for the clap"
authors = ["Solana Maintainers <maintainers@solana.foundation>"]
authors = ["Solana Maintainers <maintainers@solana.com>"]
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"
homepage = "https://solana.com/"
@@ -11,9 +11,9 @@ edition = "2018"
[dependencies]
clap = "2.33.0"
rpassword = "4.0"
solana-remote-wallet = { path = "../remote-wallet", version = "1.4.18" }
solana-sdk = { path = "../sdk", version = "1.4.18" }
thiserror = "1.0.20"
solana-remote-wallet = { path = "../remote-wallet", version = "1.2.0" }
solana-sdk = { path = "../sdk", version = "1.2.0" }
thiserror = "1.0.11"
tiny-bip39 = "0.7.0"
url = "2.1.0"
chrono = "0.4"

@@ -15,7 +15,7 @@ pub fn commitment_arg_with_default<'a, 'b>(default_value: &'static str) -> Arg<'
    Arg::with_name(COMMITMENT_ARG.name)
        .long(COMMITMENT_ARG.long)
        .takes_value(true)
        .possible_values(&["recent", "single", "singleGossip", "root", "max"])
        .possible_values(&["recent", "single", "root", "max"])
        .default_value(default_value)
        .value_name("COMMITMENT_LEVEL")
        .help(COMMITMENT_ARG.help)

@@ -1,19 +0,0 @@
use crate::{input_validators, ArgConstant};
use clap::Arg;

pub const FEE_PAYER_ARG: ArgConstant<'static> = ArgConstant {
    name: "fee_payer",
    long: "fee-payer",
    help: "Specify the fee-payer account. This may be a keypair file, the ASK keyword \n\
           or the pubkey of an offline signer, provided an appropriate --signer argument \n\
           is also passed. Defaults to the client keypair.",
};

pub fn fee_payer_arg<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(FEE_PAYER_ARG.name)
        .long(FEE_PAYER_ARG.long)
        .takes_value(true)
        .value_name("KEYPAIR")
        .validator(input_validators::is_valid_signer)
        .help(FEE_PAYER_ARG.help)
}
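Illustrative only (not part of this diff): a sketch of how the fee-payer helper shown in the hunk above can be attached to a clap `App` and read back by arg name, assuming the `solana-clap-utils` and `clap` 2.33 crates are available. The `ASK` value is used because the help text above lists it as an accepted form.

```rust
use clap::App;
use solana_clap_utils::fee_payer::{fee_payer_arg, FEE_PAYER_ARG};

fn main() {
    // Hypothetical command that accepts --fee-payer.
    let matches = App::new("pay")
        .arg(fee_payer_arg())
        .get_matches_from(vec!["pay", "--fee-payer", "ASK"]);

    // The value was checked by the is_valid_signer validator; "ASK" is one of
    // the accepted forms listed in the arg's help text above.
    if let Some(fee_payer) = matches.value_of(FEE_PAYER_ARG.name) {
        println!("fee payer source: {}", fee_payer);
    }
}
```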
@@ -8,7 +8,6 @@ use solana_remote_wallet::remote_wallet::RemoteWalletManager;
use solana_sdk::{
    clock::UnixTimestamp,
    commitment_config::CommitmentConfig,
    genesis_config::ClusterType,
    native_token::sol_to_lamports,
    pubkey::Pubkey,
    signature::{read_keypair_file, Keypair, Signature, Signer},
@@ -179,17 +178,12 @@ pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> {
    value_of(matches, name).map(sol_to_lamports)
}

pub fn cluster_type_of(matches: &ArgMatches<'_>, name: &str) -> Option<ClusterType> {
    value_of(matches, name)
}

pub fn commitment_of(matches: &ArgMatches<'_>, name: &str) -> Option<CommitmentConfig> {
    matches.value_of(name).map(|value| match value {
        "max" => CommitmentConfig::max(),
        "recent" => CommitmentConfig::recent(),
        "root" => CommitmentConfig::root(),
        "single" => CommitmentConfig::single(),
        "singleGossip" => CommitmentConfig::single_gossip(),
        _ => CommitmentConfig::default(),
    })
}
@@ -229,8 +223,8 @@ mod tests {
        assert_eq!(values_of(&matches, "multiple"), Some(vec![50, 39]));
        assert_eq!(values_of::<u64>(&matches, "single"), None);

        let pubkey0 = solana_sdk::pubkey::new_rand();
        let pubkey1 = solana_sdk::pubkey::new_rand();
        let pubkey0 = Pubkey::new_rand();
        let pubkey1 = Pubkey::new_rand();
        let matches = app().clone().get_matches_from(vec![
            "test",
            "--multiple",
@@ -252,7 +246,7 @@ mod tests {
        assert_eq!(value_of(&matches, "single"), Some(50));
        assert_eq!(value_of::<u64>(&matches, "multiple"), None);

        let pubkey = solana_sdk::pubkey::new_rand();
        let pubkey = Pubkey::new_rand();
        let matches = app()
            .clone()
            .get_matches_from(vec!["test", "--single", &pubkey.to_string()]);
@@ -332,8 +326,8 @@ mod tests {

    #[test]
    fn test_pubkeys_sigs_of() {
        let key1 = solana_sdk::pubkey::new_rand();
        let key2 = solana_sdk::pubkey::new_rand();
        let key1 = Pubkey::new_rand();
        let key2 = Pubkey::new_rand();
        let sig1 = Keypair::new().sign_message(&[0u8]);
        let sig2 = Keypair::new().sign_message(&[1u8]);
        let signer1 = format!("{}={}", key1, sig1);

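Illustrative only (not part of this diff): how `commitment_of` from the hunk above is typically paired with `commitment_arg_with_default` from the commitment module shown earlier. It assumes `solana-clap-utils` and `clap` 2.33 are available and that `COMMITMENT_ARG` is exported by the commitment module.

```rust
use clap::App;
use solana_clap_utils::{
    commitment::{commitment_arg_with_default, COMMITMENT_ARG},
    input_parsers::commitment_of,
};

fn main() {
    // Hypothetical CLI that accepts --commitment, defaulting to "recent".
    let matches = App::new("example")
        .arg(commitment_arg_with_default("recent"))
        .get_matches_from(vec!["example", "--commitment", "root"]);

    // commitment_of maps the selected string onto a CommitmentConfig.
    if commitment_of(&matches, COMMITMENT_ARG.name).is_some() {
        println!("commitment level accepted");
    }
}
```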
@@ -6,86 +6,50 @@ use solana_sdk::{
 | 
			
		||||
    pubkey::Pubkey,
 | 
			
		||||
    signature::{read_keypair_file, Signature},
 | 
			
		||||
};
 | 
			
		||||
use std::fmt::Display;
 | 
			
		||||
use std::str::FromStr;
 | 
			
		||||
 | 
			
		||||
fn is_parsable_generic<U, T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
    U: FromStr,
 | 
			
		||||
    U::Err: Display,
 | 
			
		||||
{
 | 
			
		||||
    string
 | 
			
		||||
        .as_ref()
 | 
			
		||||
        .parse::<U>()
 | 
			
		||||
        .map(|_| ())
 | 
			
		||||
        .map_err(|err| format!("error parsing '{}': {}", string, err))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if string cannot be parsed as type T.
 | 
			
		||||
// Takes a String to avoid second type parameter when used as a clap validator
 | 
			
		||||
pub fn is_parsable<T>(string: String) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: FromStr,
 | 
			
		||||
    T::Err: Display,
 | 
			
		||||
{
 | 
			
		||||
    is_parsable_generic::<T, String>(string)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if a pubkey cannot be parsed.
 | 
			
		||||
pub fn is_pubkey<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    is_parsable_generic::<Pubkey, _>(string)
 | 
			
		||||
pub fn is_pubkey(string: String) -> Result<(), String> {
 | 
			
		||||
    match string.parse::<Pubkey>() {
 | 
			
		||||
        Ok(_) => Ok(()),
 | 
			
		||||
        Err(err) => Err(format!("{}", err)),
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if a hash cannot be parsed.
 | 
			
		||||
pub fn is_hash<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    is_parsable_generic::<Hash, _>(string)
 | 
			
		||||
pub fn is_hash(string: String) -> Result<(), String> {
 | 
			
		||||
    match string.parse::<Hash>() {
 | 
			
		||||
        Ok(_) => Ok(()),
 | 
			
		||||
        Err(err) => Err(format!("{}", err)),
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if a keypair file cannot be parsed.
 | 
			
		||||
pub fn is_keypair<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    read_keypair_file(string.as_ref())
 | 
			
		||||
pub fn is_keypair(string: String) -> Result<(), String> {
 | 
			
		||||
    read_keypair_file(&string)
 | 
			
		||||
        .map(|_| ())
 | 
			
		||||
        .map_err(|err| format!("{}", err))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if a keypair file cannot be parsed
 | 
			
		||||
pub fn is_keypair_or_ask_keyword<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    if string.as_ref() == ASK_KEYWORD {
 | 
			
		||||
pub fn is_keypair_or_ask_keyword(string: String) -> Result<(), String> {
 | 
			
		||||
    if string.as_str() == ASK_KEYWORD {
 | 
			
		||||
        return Ok(());
 | 
			
		||||
    }
 | 
			
		||||
    read_keypair_file(string.as_ref())
 | 
			
		||||
    read_keypair_file(&string)
 | 
			
		||||
        .map(|_| ())
 | 
			
		||||
        .map_err(|err| format!("{}", err))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if string cannot be parsed as pubkey string or keypair file location
 | 
			
		||||
pub fn is_pubkey_or_keypair<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    is_pubkey(string.as_ref()).or_else(|_| is_keypair(string))
 | 
			
		||||
pub fn is_pubkey_or_keypair(string: String) -> Result<(), String> {
 | 
			
		||||
    is_pubkey(string.clone()).or_else(|_| is_keypair(string))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if string cannot be parsed as a pubkey string, or a valid Signer that can
 | 
			
		||||
// produce a pubkey()
 | 
			
		||||
pub fn is_valid_pubkey<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    match parse_keypair_path(string.as_ref()) {
 | 
			
		||||
pub fn is_valid_pubkey(string: String) -> Result<(), String> {
 | 
			
		||||
    match parse_keypair_path(&string) {
 | 
			
		||||
        KeypairUrl::Filepath(path) => is_keypair(path),
 | 
			
		||||
        _ => Ok(()),
 | 
			
		||||
    }
 | 
			
		||||
@@ -99,19 +63,13 @@ where
 | 
			
		||||
// when paired with an offline `--signer` argument to provide a Presigner (pubkey + signature).
 | 
			
		||||
// Clap validators can't check multiple fields at once, so the verification that a `--signer` is
 | 
			
		||||
// also provided and correct happens in parsing, not in validation.
 | 
			
		||||
pub fn is_valid_signer<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
pub fn is_valid_signer(string: String) -> Result<(), String> {
 | 
			
		||||
    is_valid_pubkey(string)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if string cannot be parsed as pubkey=signature string
 | 
			
		||||
pub fn is_pubkey_sig<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    let mut signer = string.as_ref().split('=');
 | 
			
		||||
pub fn is_pubkey_sig(string: String) -> Result<(), String> {
 | 
			
		||||
    let mut signer = string.split('=');
 | 
			
		||||
    match Pubkey::from_str(
 | 
			
		||||
        signer
 | 
			
		||||
            .next()
 | 
			
		||||
@@ -132,11 +90,8 @@ where
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Return an error if a url cannot be parsed.
 | 
			
		||||
pub fn is_url<T>(string: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    match url::Url::parse(string.as_ref()) {
 | 
			
		||||
pub fn is_url(string: String) -> Result<(), String> {
 | 
			
		||||
    match url::Url::parse(&string) {
 | 
			
		||||
        Ok(url) => {
 | 
			
		||||
            if url.has_host() {
 | 
			
		||||
                Ok(())
 | 
			
		||||
@@ -148,26 +103,20 @@ where
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_slot<T>(slot: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    is_parsable_generic::<Slot, _>(slot)
 | 
			
		||||
pub fn is_slot(slot: String) -> Result<(), String> {
 | 
			
		||||
    slot.parse::<Slot>()
 | 
			
		||||
        .map(|_| ())
 | 
			
		||||
        .map_err(|e| format!("{}", e))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_port<T>(port: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    is_parsable_generic::<u16, _>(port)
 | 
			
		||||
pub fn is_port(port: String) -> Result<(), String> {
 | 
			
		||||
    port.parse::<u16>()
 | 
			
		||||
        .map(|_| ())
 | 
			
		||||
        .map_err(|e| format!("{}", e))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_valid_percentage<T>(percentage: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
pub fn is_valid_percentage(percentage: String) -> Result<(), String> {
 | 
			
		||||
    percentage
 | 
			
		||||
        .as_ref()
 | 
			
		||||
        .parse::<u8>()
 | 
			
		||||
        .map_err(|e| {
 | 
			
		||||
            format!(
 | 
			
		||||
@@ -187,11 +136,8 @@ where
 | 
			
		||||
        })
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_amount<T>(amount: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    if amount.as_ref().parse::<u64>().is_ok() || amount.as_ref().parse::<f64>().is_ok() {
 | 
			
		||||
pub fn is_amount(amount: String) -> Result<(), String> {
 | 
			
		||||
    if amount.parse::<u64>().is_ok() || amount.parse::<f64>().is_ok() {
 | 
			
		||||
        Ok(())
 | 
			
		||||
    } else {
 | 
			
		||||
        Err(format!(
 | 
			
		||||
@@ -201,14 +147,8 @@ where
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_amount_or_all<T>(amount: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    if amount.as_ref().parse::<u64>().is_ok()
 | 
			
		||||
        || amount.as_ref().parse::<f64>().is_ok()
 | 
			
		||||
        || amount.as_ref() == "ALL"
 | 
			
		||||
    {
 | 
			
		||||
pub fn is_amount_or_all(amount: String) -> Result<(), String> {
 | 
			
		||||
    if amount.parse::<u64>().is_ok() || amount.parse::<f64>().is_ok() || amount == "ALL" {
 | 
			
		||||
        Ok(())
 | 
			
		||||
    } else {
 | 
			
		||||
        Err(format!(
 | 
			
		||||
@@ -218,20 +158,14 @@ where
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_rfc3339_datetime<T>(value: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    DateTime::parse_from_rfc3339(value.as_ref())
 | 
			
		||||
pub fn is_rfc3339_datetime(value: String) -> Result<(), String> {
 | 
			
		||||
    DateTime::parse_from_rfc3339(&value)
 | 
			
		||||
        .map(|_| ())
 | 
			
		||||
        .map_err(|e| format!("{}", e))
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub fn is_derivation<T>(value: T) -> Result<(), String>
 | 
			
		||||
where
 | 
			
		||||
    T: AsRef<str> + Display,
 | 
			
		||||
{
 | 
			
		||||
    let value = value.as_ref().replace("'", "");
 | 
			
		||||
pub fn is_derivation(value: String) -> Result<(), String> {
 | 
			
		||||
    let value = value.replace("'", "");
 | 
			
		||||
    let mut parts = value.split('/');
 | 
			
		||||
    let account = parts.next().unwrap();
 | 
			
		||||
    account
 | 
			
		||||
@@ -263,14 +197,14 @@ mod tests {
 | 
			
		||||
 | 
			
		||||
    #[test]
 | 
			
		||||
    fn test_is_derivation() {
 | 
			
		||||
        assert_eq!(is_derivation("2"), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("0"), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("65537"), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("0/2"), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("0'/2'"), Ok(()));
 | 
			
		||||
        assert!(is_derivation("a").is_err());
 | 
			
		||||
        assert!(is_derivation("4294967296").is_err());
 | 
			
		||||
        assert!(is_derivation("a/b").is_err());
 | 
			
		||||
        assert!(is_derivation("0/4294967296").is_err());
 | 
			
		||||
        assert_eq!(is_derivation("2".to_string()), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("0".to_string()), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("65537".to_string()), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("0/2".to_string()), Ok(()));
 | 
			
		||||
        assert_eq!(is_derivation("0'/2'".to_string()), Ok(()));
 | 
			
		||||
        assert!(is_derivation("a".to_string()).is_err());
 | 
			
		||||
        assert!(is_derivation("4294967296".to_string()).is_err());
 | 
			
		||||
        assert!(is_derivation("a/b".to_string()).is_err());
 | 
			
		||||
        assert!(is_derivation("0/4294967296".to_string()).is_err());
 | 
			
		||||
    }
 | 
			
		||||
}
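The validator hunks above differ mainly in signature: one side takes `String`, the other is generic over `AsRef<str> + Display`, so the same helper accepts `String`, `&str`, or other string-like types. A self-contained sketch of that generic pattern, standard library only, mirroring the `is_parsable_generic`/`is_parsable` pair shown above:

```rust
use std::fmt::Display;
use std::str::FromStr;

// Generic core: try to parse `string` as U and report a readable error.
fn is_parsable_generic<U, T>(string: T) -> Result<(), String>
where
    T: AsRef<str> + Display,
    U: FromStr,
    U::Err: Display,
{
    string
        .as_ref()
        .parse::<U>()
        .map(|_| ())
        .map_err(|err| format!("error parsing '{}': {}", string, err))
}

// String-only wrapper, convenient as a clap 2.x validator (fn(String) -> Result<(), String>).
fn is_parsable<T>(string: String) -> Result<(), String>
where
    T: FromStr,
    T::Err: Display,
{
    is_parsable_generic::<T, String>(string)
}

fn main() {
    assert_eq!(is_parsable_generic::<u16, _>("8899"), Ok(()));  // &str works
    assert_eq!(is_parsable::<u16>("8899".to_string()), Ok(())); // String works
    assert!(is_parsable::<u16>("not-a-port".to_string()).is_err());
}
```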
 | 
			
		||||
 
 | 
			
		||||
@@ -11,7 +11,6 @@ use solana_remote_wallet::{
 | 
			
		||||
    remote_wallet::{maybe_wallet_manager, RemoteWalletError, RemoteWalletManager},
 | 
			
		||||
};
 | 
			
		||||
use solana_sdk::{
 | 
			
		||||
    hash::Hash,
 | 
			
		||||
    pubkey::Pubkey,
 | 
			
		||||
    signature::{
 | 
			
		||||
        keypair_from_seed, keypair_from_seed_phrase_and_passphrase, read_keypair,
 | 
			
		||||
@@ -26,81 +25,6 @@ use std::{
 | 
			
		||||
    sync::Arc,
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
pub struct SignOnly {
 | 
			
		||||
    pub blockhash: Hash,
 | 
			
		||||
    pub present_signers: Vec<(Pubkey, Signature)>,
 | 
			
		||||
    pub absent_signers: Vec<Pubkey>,
 | 
			
		||||
    pub bad_signers: Vec<Pubkey>,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl SignOnly {
 | 
			
		||||
    pub fn has_all_signers(&self) -> bool {
 | 
			
		||||
        self.absent_signers.is_empty() && self.bad_signers.is_empty()
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    pub fn presigner_of(&self, pubkey: &Pubkey) -> Option<Presigner> {
 | 
			
		||||
        presigner_from_pubkey_sigs(pubkey, &self.present_signers)
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
pub type CliSigners = Vec<Box<dyn Signer>>;
 | 
			
		||||
pub type SignerIndex = usize;
 | 
			
		||||
pub struct CliSignerInfo {
 | 
			
		||||
    pub signers: CliSigners,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl CliSignerInfo {
 | 
			
		||||
    pub fn index_of(&self, pubkey: Option<Pubkey>) -> Option<usize> {
 | 
			
		||||
        if let Some(pubkey) = pubkey {
 | 
			
		||||
            self.signers
 | 
			
		||||
                .iter()
 | 
			
		||||
                .position(|signer| signer.pubkey() == pubkey)
 | 
			
		||||
        } else {
 | 
			
		||||
            Some(0)
 | 
			
		||||
        }
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
pub struct DefaultSigner {
 | 
			
		||||
    pub arg_name: String,
 | 
			
		||||
    pub path: String,
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
impl DefaultSigner {
 | 
			
		||||
    pub fn generate_unique_signers(
 | 
			
		||||
        &self,
 | 
			
		||||
        bulk_signers: Vec<Option<Box<dyn Signer>>>,
 | 
			
		||||
        matches: &ArgMatches<'_>,
 | 
			
		||||
        wallet_manager: &mut Option<Arc<RemoteWalletManager>>,
 | 
			
		||||
    ) -> Result<CliSignerInfo, Box<dyn error::Error>> {
 | 
			
		||||
        let mut unique_signers = vec![];
 | 
			
		||||
 | 
			
		||||
        // Determine if the default signer is needed
 | 
			
		||||
        if bulk_signers.iter().any(|signer| signer.is_none()) {
 | 
			
		||||
            let default_signer = self.signer_from_path(matches, wallet_manager)?;
 | 
			
		||||
            unique_signers.push(default_signer);
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        for signer in bulk_signers.into_iter() {
 | 
			
		||||
            if let Some(signer) = signer {
 | 
			
		||||
                if !unique_signers.iter().any(|s| s == &signer) {
 | 
			
		||||
                    unique_signers.push(signer);
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        }
 | 
			
		||||
        Ok(CliSignerInfo {
 | 
			
		||||
            signers: unique_signers,
 | 
			
		||||
        })
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    pub fn signer_from_path(
 | 
			
		||||
        &self,
 | 
			
		||||
        matches: &ArgMatches,
 | 
			
		||||
        wallet_manager: &mut Option<Arc<RemoteWalletManager>>,
 | 
			
		||||
    ) -> Result<Box<dyn Signer>, Box<dyn std::error::Error>> {
 | 
			
		||||
        signer_from_path(matches, &self.path, &self.arg_name, wallet_manager)
 | 
			
		||||
    }
 | 
			
		||||
}
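A minimal, self-contained sketch of the de-duplication step in `generate_unique_signers` above, using a stand-in trait instead of the real `solana-sdk` `Signer` types; illustrative only.

```rust
// Stand-in for the real Signer trait: only the identity used for dedup.
trait PubkeySource {
    fn pubkey(&self) -> u8; // hypothetical tiny "pubkey"
}

struct FixedSigner(u8);
impl PubkeySource for FixedSigner {
    fn pubkey(&self) -> u8 {
        self.0
    }
}

// Keep the first occurrence of each pubkey, skipping absent (None) entries.
fn unique_signers(bulk: Vec<Option<Box<dyn PubkeySource>>>) -> Vec<Box<dyn PubkeySource>> {
    let mut unique: Vec<Box<dyn PubkeySource>> = Vec::new();
    for signer in bulk.into_iter().flatten() {
        if !unique.iter().any(|s| s.pubkey() == signer.pubkey()) {
            unique.push(signer);
        }
    }
    unique
}

fn main() {
    let bulk: Vec<Option<Box<dyn PubkeySource>>> = vec![
        Some(Box::new(FixedSigner(1))),
        None, // in the real code a None entry triggers resolving the default signer
        Some(Box::new(FixedSigner(1))), // duplicate identity, dropped
        Some(Box::new(FixedSigner(2))),
    ];
    assert_eq!(unique_signers(bulk).len(), 2);
}
```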
 | 
			
		||||
 | 
			
		||||
pub enum KeypairUrl {
 | 
			
		||||
    Ask,
 | 
			
		||||
    Filepath(String),
 | 
			
		||||
@@ -154,7 +78,7 @@ pub fn signer_from_path(
 | 
			
		||||
        KeypairUrl::Filepath(path) => match read_keypair_file(&path) {
 | 
			
		||||
            Err(e) => Err(std::io::Error::new(
 | 
			
		||||
                std::io::ErrorKind::Other,
 | 
			
		||||
                format!("could not read keypair file \"{}\". Run \"solana-keygen new\" to create a keypair file: {}", path, e),
 | 
			
		||||
                format!("could not find keypair file: {} error: {}", path, e),
 | 
			
		||||
            )
 | 
			
		||||
            .into()),
 | 
			
		||||
            Ok(file) => Ok(Box::new(file)),
 | 
			
		||||
@@ -225,7 +149,7 @@ pub fn resolve_signer_from_path(
 | 
			
		||||
        KeypairUrl::Filepath(path) => match read_keypair_file(&path) {
 | 
			
		||||
            Err(e) => Err(std::io::Error::new(
 | 
			
		||||
                std::io::ErrorKind::Other,
 | 
			
		||||
                format!("could not read keypair file \"{}\". Run \"solana-keygen new\" to create a keypair file: {}", path, e),
 | 
			
		||||
                format!("could not find keypair file: {} error: {}", path, e),
 | 
			
		||||
            )
 | 
			
		||||
            .into()),
 | 
			
		||||
            Ok(_) => Ok(Some(path.to_string())),
 | 
			
		||||
@@ -298,24 +222,7 @@ pub fn keypair_from_seed_phrase(
 | 
			
		||||
        keypair_from_seed_phrase_and_passphrase(&seed_phrase, &passphrase)?
 | 
			
		||||
    } else {
 | 
			
		||||
        let sanitized = sanitize_seed_phrase(seed_phrase);
 | 
			
		||||
        let parse_language_fn = || {
 | 
			
		||||
            for language in &[
 | 
			
		||||
                Language::English,
 | 
			
		||||
                Language::ChineseSimplified,
 | 
			
		||||
                Language::ChineseTraditional,
 | 
			
		||||
                Language::Japanese,
 | 
			
		||||
                Language::Spanish,
 | 
			
		||||
                Language::Korean,
 | 
			
		||||
                Language::French,
 | 
			
		||||
                Language::Italian,
 | 
			
		||||
            ] {
 | 
			
		||||
                if let Ok(mnemonic) = Mnemonic::from_phrase(&sanitized, *language) {
 | 
			
		||||
                    return Ok(mnemonic);
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
            Err("Can't get mnemonic from seed phrases")
 | 
			
		||||
        };
 | 
			
		||||
        let mnemonic = parse_language_fn()?;
 | 
			
		||||
        let mnemonic = Mnemonic::from_phrase(&sanitized, Language::English)?;
 | 
			
		||||
        let passphrase = prompt_passphrase(&passphrase_prompt)?;
 | 
			
		||||
        let seed = Seed::new(&mnemonic, &passphrase);
 | 
			
		||||
        keypair_from_seed(seed.as_bytes())?
 | 
			
		||||
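The two variants of `keypair_from_seed_phrase` in the hunk above differ in whether several BIP39 wordlists are tried before deriving a seed. A sketch of the multi-language attempt, assuming the `tiny-bip39` 0.7 crate (imported as `bip39`) pinned in the Cargo.toml shown earlier; the phrase is the standard BIP39 test vector, used only as sample input.

```rust
use bip39::{Language, Mnemonic, Seed};

// Try each supported wordlist until one accepts the phrase, as the keypair
// helper above does before deriving a seed.
fn mnemonic_in_any_language(phrase: &str) -> Result<Mnemonic, &'static str> {
    for language in &[
        Language::English,
        Language::ChineseSimplified,
        Language::ChineseTraditional,
        Language::Japanese,
        Language::Spanish,
        Language::Korean,
        Language::French,
        Language::Italian,
    ] {
        if let Ok(mnemonic) = Mnemonic::from_phrase(phrase, *language) {
            return Ok(mnemonic);
        }
    }
    Err("can't get mnemonic from seed phrases")
}

fn main() {
    // Standard BIP39 test-vector phrase, here purely as sample input.
    let phrase =
        "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about";
    let mnemonic = mnemonic_in_any_language(phrase).unwrap();
    let seed = Seed::new(&mnemonic, "");
    println!("seed length: {} bytes", seed.as_bytes().len());
}
```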
 
 | 
			
		||||
@@ -24,9 +24,7 @@ impl std::fmt::Debug for DisplayError {
}

pub mod commitment;
pub mod fee_payer;
pub mod input_parsers;
pub mod input_validators;
pub mod keypair;
pub mod nonce;
pub mod offline;

@@ -1,50 +0,0 @@
use crate::{input_validators::*, offline::BLOCKHASH_ARG, ArgConstant};
use clap::{App, Arg};

pub const NONCE_ARG: ArgConstant<'static> = ArgConstant {
    name: "nonce",
    long: "nonce",
    help: "Provide the nonce account to use when creating a nonced \n\
           transaction. Nonced transactions are useful when a transaction \n\
           requires a lengthy signing process. Learn more about nonced \n\
           transactions at https://docs.solana.com/offline-signing/durable-nonce",
};

pub const NONCE_AUTHORITY_ARG: ArgConstant<'static> = ArgConstant {
    name: "nonce_authority",
    long: "nonce-authority",
    help: "Provide the nonce authority keypair to use when signing a nonced transaction",
};

fn nonce_arg<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(NONCE_ARG.name)
        .long(NONCE_ARG.long)
        .takes_value(true)
        .value_name("PUBKEY")
        .requires(BLOCKHASH_ARG.name)
        .validator(is_valid_pubkey)
        .help(NONCE_ARG.help)
}

pub fn nonce_authority_arg<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(NONCE_AUTHORITY_ARG.name)
        .long(NONCE_AUTHORITY_ARG.long)
        .takes_value(true)
        .value_name("KEYPAIR")
        .validator(is_valid_signer)
        .help(NONCE_AUTHORITY_ARG.help)
}

pub trait NonceArgs {
    fn nonce_args(self, global: bool) -> Self;
}

impl NonceArgs for App<'_, '_> {
    fn nonce_args(self, global: bool) -> Self {
        self.arg(nonce_arg().global(global)).arg(
            nonce_authority_arg()
                .requires(NONCE_ARG.name)
                .global(global),
        )
    }
}

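The removed module above follows the crate's `ArgConstant` pattern: an arg's name, long flag, and help text live in one const that both the builder function and later `matches.value_of` lookups reference. A self-contained sketch of that pattern with a hypothetical flag, assuming clap 2.33; it is not part of this diff.

```rust
use clap::{App, Arg};

// Same shape as solana-clap-utils' ArgConstant<'static>.
struct ArgConstant<'a> {
    name: &'a str,
    long: &'a str,
    help: &'a str,
}

const NONCE_ARG: ArgConstant<'static> = ArgConstant {
    name: "nonce",
    long: "nonce",
    help: "Provide the nonce account to use when creating a nonced transaction",
};

fn nonce_arg<'a, 'b>() -> Arg<'a, 'b> {
    Arg::with_name(NONCE_ARG.name)
        .long(NONCE_ARG.long)
        .takes_value(true)
        .value_name("PUBKEY")
        .help(NONCE_ARG.help)
}

fn main() {
    let matches = App::new("example")
        .arg(nonce_arg())
        .get_matches_from(vec!["example", "--nonce", "11111111111111111111111111111111"]);
    // The same const keys the lookup, so renames stay in one place.
    println!("nonce = {:?}", matches.value_of(NONCE_ARG.name));
}
```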
Some files were not shown because too many files have changed in this diff