Compare commits

49 Commits

SHA1: 9a30100a9c, aa741b3147, 09db7b5b52, fa9faa2cec, d2dc585974, 6721bdde3d,
a733873b8f, 7c02bbc47c, 16a815d2b1, ddb490e2fb, 242d0a23fb, 869009243d,
7b61f5279c, 7ef0b815ec, 8742de789e, bfadd7b787, 2e14bfcf4e, a19426f055,
df366017a7, 7d76badd03, 8047ab777c, 0d0a1c2919, 1da90017ce, 0909618efa,
28bb7849f4, 9cffd3a1ea, 917151ce54, 6dcd127634, af66edf8c0, ab5b921e8f,
6c2843543b, 85f74cc537, 43665115b4, 156115c04c, a66577eb87, 3345d059e8,
8c8c5de779, f03e971598, b4a1cdceaa, b250d20059, dc3b270410, 9d5092a71c,
a287c9e5fa, ee85d534f9, 6e1b291c17, 68f7b1ecf3, 58fe5cabd6, 8993c6ae24,
0e56473add
Cargo.toml (10 changed lines)

@@ -1,7 +1,7 @@
 [package]
 name = "solana"
 description = "Blockchain, Rebuilt for Scale"
-version = "0.10.0"
+version = "0.10.4"
 documentation = "https://docs.rs/solana"
 homepage = "http://solana.com/"
 readme = "README.md"
@@ -104,14 +104,14 @@ serde_cbor = "0.9.0"
 serde_derive = "1.0.27"
 serde_json = "1.0.10"
 socket2 = "0.3.8"
-solana-sdk = { path = "sdk", version = "0.10.0" }
+solana-sdk = { path = "sdk", version = "0.10.4" }
 sys-info = "0.5.6"
 tokio = "0.1"
 tokio-codec = "0.1"
 untrusted = "0.6.2"
-solana-noop = { path = "programs/native/noop", version = "0.10.0" }
-solana-bpfloader = { path = "programs/native/bpf_loader", version = "0.10.0" }
-solana-lualoader = { path = "programs/native/lua_loader", version = "0.10.0" }
+solana-noop = { path = "programs/native/noop", version = "0.10.4" }
+solana-bpfloader = { path = "programs/native/bpf_loader", version = "0.10.4" }
+solana-lualoader = { path = "programs/native/lua_loader", version = "0.10.4" }

 [[bench]]
 name = "bank"
build.rs (4 changed lines)

@@ -8,7 +8,7 @@ fn main() {
     // Ensure target/perf-libs/ exists. It's been observed that
     // a cargo:rerun-if-changed= directive with a non-existent
     // directory triggers a rebuild on every |cargo build| invocation
-    fs::create_dir("target/perf-libs").unwrap_or_else(|err| {
+    fs::create_dir_all("target/perf-libs").unwrap_or_else(|err| {
         if err.kind() != std::io::ErrorKind::AlreadyExists {
             panic!("Unable to create target/perf-libs: {:?}", err);
         }
@@ -29,8 +29,6 @@ fn main() {
     println!("cargo:rerun-if-changed=programs/bpf/c/makefile");
     println!("cargo:rerun-if-changed=programs/bpf/c/src/move_funds.c");
     println!("cargo:rerun-if-changed=programs/bpf/c/src/noop.c");
-    println!("cargo:rerun-if-changed=programs/bpf/c/src/tictactoe.c");
-    println!("cargo:rerun-if-changed=programs/bpf/c/src/tictactoe_dashboard.c");
     println!("cargo:warning=(not a warning) Compiling C-based BPF programs");
     let status = Command::new("make")
         .current_dir("programs/bpf/c")
@@ -1,5 +1,5 @@
 steps:
-  - command: "ci/docker-run.sh solanalabs/rust:1.30.0 ci/test-stable.sh"
+  - command: "ci/docker-run.sh solanalabs/rust:1.30.1 ci/test-stable.sh"
    name: "stable [public]"
    env:
      CARGO_TARGET_CACHE_NAME: "stable"
@@ -36,7 +36,7 @@ steps:
    timeout_in_minutes: 20
    name: "snap [public]"
  - wait
-  - trigger: "solana-snap"
+  - trigger: "solana-secondary"
    branches: "!pull/*"
    async: true
    build:
@@ -1,6 +1,6 @@
 # Note: when the rust version is changed also modify
 # ci/buildkite.yml to pick up the new image tag
-FROM rust:1.30.0
+FROM rust:1.30.1

 RUN set -x && \
     apt update && \
@@ -5,6 +5,7 @@ cd "$(dirname "$0")/.."
 DRYRUN=
 if [[ -z $BUILDKITE_BRANCH ]]; then
   DRYRUN="echo"
+  CHANNEL=unknown
 fi

 eval "$(ci/channel-info.sh)"
@@ -17,42 +18,54 @@ elif [[ $BUILDKITE_BRANCH = "$BETA_CHANNEL" ]]; then
   CHANNEL=beta
 fi

-if [[ -z $CHANNEL ]]; then
+if [[ -n "$BUILDKITE_TAG" ]]; then
+  CHANNEL_OR_TAG=$BUILDKITE_TAG
+elif [[ -n "$TRIGGERED_BUILDKITE_TAG" ]]; then
+  CHANNEL_OR_TAG=$TRIGGERED_BUILDKITE_TAG
+else
+  CHANNEL_OR_TAG=$CHANNEL
+fi
+
+if [[ -z $CHANNEL_OR_TAG ]]; then
   echo Unable to determine channel to publish into, exiting.
   exit 0
 fi


 echo --- Creating tarball
 if [[ -z $DRYRUN ]]; then
 (
   set -x
   rm -rf solana-release/
   mkdir solana-release/
   (
-    echo "$CHANNEL"
+    echo "$CHANNEL_OR_TAG"
     git rev-parse HEAD
   ) > solana-release/version.txt

   cargo install --root solana-release
+  ./scripts/install-native-programs.sh solana-release/bin
   ./fetch-perf-libs.sh
-  cargo install --features=cuda --root solana-release
-  ./scripts/install-native-programs.sh solana-release
+  cargo install --features=cuda --root solana-release-cuda
+  cp solana-release-cuda/bin/solana-fullnode solana-release/bin/solana-fullnode-cuda

   tar jvcf solana-release.tar.bz2 solana-release/
 )
 fi


 echo --- AWS S3 Store
 if [[ -z $DRYRUN ]]; then
 (
-  set -x
-  if [[ ! -r s3cmd-2.0.1/s3cmd ]]; then
-    rm -rf s3cmd-2.0.1.tar.gz s3cmd-2.0.1
-    $DRYRUN wget https://github.com/s3tools/s3cmd/releases/download/v2.0.1/s3cmd-2.0.1.tar.gz
-    $DRYRUN tar zxf s3cmd-2.0.1.tar.gz
-  fi
-
+  set -x
+  if [[ ! -r s3cmd-2.0.1/s3cmd ]]; then
+    rm -rf s3cmd-2.0.1.tar.gz s3cmd-2.0.1
+    $DRYRUN wget https://github.com/s3tools/s3cmd/releases/download/v2.0.1/s3cmd-2.0.1.tar.gz
+    $DRYRUN tar zxf s3cmd-2.0.1.tar.gz
+  fi
+  $DRYRUN python ./s3cmd-2.0.1/s3cmd --acl-public put solana-release.tar.bz2 \
+    s3://solana-release/"$CHANNEL_OR_TAG"/solana-release.tar.bz2
 )
 else
   echo Skipped due to DRYRUN
 fi

-$DRYRUN python ./s3cmd-2.0.1/s3cmd --acl-public put solana-release.tar.bz2 \
-  s3://solana-release/"$CHANNEL"/solana-release.tar.bz2
-
 exit 0
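With the channel/tag change above, a tagged CI run publishes the tarball under its tag rather than under a channel name. The sketch below is illustrative only — the v0.10.4 tag is used as an example value, matching the version bump in this compare — and simply lines up the publish and fetch sides that appear in these scripts:

```bash
# publish side (this script): upload keyed by the release tag
python ./s3cmd-2.0.1/s3cmd --acl-public put solana-release.tar.bz2 \
  s3://solana-release/v0.10.4/solana-release.tar.bz2

# consumer side (net/net.sh start -t v0.10.4): fetch the same object over HTTP
curl -o solana-release.tar.bz2 \
  http://solana-release.s3.amazonaws.com/v0.10.4/solana-release.tar.bz2
tar jxvf solana-release.tar.bz2
```

Both URLs refer to the same S3 object; only the access path differs.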
@@ -9,10 +9,10 @@ clientNodeCount=0
 validatorNodeCount=10
 publicNetwork=false
 snapChannel=edge
-releaseChannel=edge
+tarChannelOrTag=edge
 delete=false
 enableGpu=false
-useReleaseChannel=false
+useTarReleaseChannel=false

 usage() {
   exitcode=0
@@ -21,18 +21,21 @@ usage() {
     echo "Error: $*"
   fi
   cat <<EOF
-usage: $0 [name] [zone] [options...]
+usage: $0 [name] [cloud] [zone] [options...]

 Deploys a CD testnet

 name  - name of the network
-zone  - zone to deploy the network into
+cloud - cloud provider to use (gce, ec2)
+zone  - cloud provider zone to deploy the network into

 options:
   -s edge|beta|stable  - Deploy the specified Snap release channel
                          (default: $snapChannel)
-  -t edge|beta|stable  - Deploy the specified prebuilt tar from channel
-                         (default: $releaseChannel)
+  -t edge|beta|stable|vX.Y.Z  - Deploy the latest tarball release for the
+                                specified release channel (edge|beta|stable) or release tag
+                                (vX.Y.Z)
+                                (default: $tarChannelOrTag)
   -n [number]  - Number of validator nodes (default: $validatorNodeCount)
   -c [number]  - Number of client nodes (default: $clientNodeCount)
   -P           - Use public network IP addresses (default: $publicNetwork)
@@ -48,10 +51,12 @@ EOF
 }

 netName=$1
-zone=$2
+cloudProvider=$2
+zone=$3
 [[ -n $netName ]] || usage
+[[ -n $cloudProvider ]] || usage "Cloud provider not specified"
 [[ -n $zone ]] || usage "Zone not specified"
-shift 2
+shift 3

 while getopts "h?p:Pn:c:s:t:gG:a:d" opt; do
   case $opt in
@@ -79,9 +84,9 @@ while getopts "h?p:Pn:c:s:t:gG:a:d" opt; do
     ;;
   t)
     case $OPTARG in
-    edge|beta|stable)
-      releaseChannel=$OPTARG
-      useReleaseChannel=true
+    edge|beta|stable|v*)
+      tarChannelOrTag=$OPTARG
+      useTarReleaseChannel=true
       ;;
     *)
       usage "Invalid release channel: $OPTARG"
@@ -108,7 +113,7 @@ while getopts "h?p:Pn:c:s:t:gG:a:d" opt; do
 done


-gce_create_args=(
+create_args=(
   -a "$leaderAddress"
   -c "$clientNodeCount"
   -n "$validatorNodeCount"
@@ -118,26 +123,26 @@ gce_create_args=(

 if $enableGpu; then
   if [[ -z $leaderMachineType ]]; then
-    gce_create_args+=(-g)
+    create_args+=(-g)
   else
-    gce_create_args+=(-G "$leaderMachineType")
+    create_args+=(-G "$leaderMachineType")
   fi
 fi

 if $publicNetwork; then
-  gce_create_args+=(-P)
+  create_args+=(-P)
 fi

 set -x

-echo --- gce.sh delete
-time net/gce.sh delete -z "$zone" -p "$netName"
+echo "--- $cloudProvider.sh delete"
+time net/"$cloudProvider".sh delete -z "$zone" -p "$netName"
 if $delete; then
   exit 0
 fi

-echo --- gce.sh create
-time net/gce.sh create "${gce_create_args[@]}"
+echo "--- $cloudProvider.sh create"
+time net/"$cloudProvider".sh create "${create_args[@]}"
 net/init-metrics.sh -e

 echo --- net.sh start
@@ -154,9 +159,9 @@ if [[ -n $NO_LEDGER_VERIFY ]]; then
   maybeNoLedgerVerify="-o noLedgerVerify"
 fi
 # shellcheck disable=SC2086 # Don't want to double quote maybeRejectExtraNodes
-if ! $useReleaseChannel; then
-  time net/net.sh start -s "$snapChannel" $maybeRejectExtraNodes $maybeNoValidatorSanity $maybeNoLedgerVerify
+if $useTarReleaseChannel; then
+  time net/net.sh start -t "$tarChannelOrTag" $maybeRejectExtraNodes $maybeNoValidatorSanity $maybeNoLedgerVerify
 else
-  time net/net.sh start -t "$releaseChannel" $maybeRejectExtraNodes $maybeNoValidatorSanity $maybeNoLedgerVerify
+  time net/net.sh start -s "$snapChannel" $maybeRejectExtraNodes $maybeNoValidatorSanity $maybeNoLedgerVerify
 fi
 exit 0
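The new positional arguments make the cloud provider explicit. A minimal sketch of an invocation under the new interface, mirroring the calls made by ci/testnet-manager.sh (introduced in this change set); the network name, zone, and Elastic IP allocation are taken from that script and are illustrative values, not requirements:

```bash
# deploy 3 validators and no clients on EC2, using the latest beta tarball
ci/testnet-deploy.sh beta-testnet-solana-com ec2 us-west-1a \
  -t beta -n 3 -c 0 -P -a eipalloc-0f286cf8a0771ce35

# tear the same network down (-d deletes and exits)
ci/testnet-deploy.sh beta-testnet-solana-com ec2 us-west-1a -d
```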
ci/testnet-manager.sh (new executable file, 359 lines)

@@ -0,0 +1,359 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
cd "$(dirname "$0")"/..
|
||||
|
||||
if [[ -z $BUILDKITE ]]; then
|
||||
echo BUILDKITE not defined
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z $SOLANA_METRICS_PARTIAL_CONFIG ]]; then
|
||||
echo SOLANA_METRICS_PARTIAL_CONFIG not defined
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z $TESTNET ]]; then
|
||||
TESTNET=$(buildkite-agent meta-data get "testnet" --default "")
|
||||
fi
|
||||
|
||||
if [[ -z $TESTNET_OP ]]; then
|
||||
TESTNET_OP=$(buildkite-agent meta-data get "testnet-operation" --default "")
|
||||
fi
|
||||
|
||||
if [[ -z $TESTNET || -z $TESTNET_OP ]]; then
|
||||
(
|
||||
cat <<EOF
|
||||
steps:
|
||||
- block: "Manage Testnet"
|
||||
fields:
|
||||
- select: "Network"
|
||||
key: "testnet"
|
||||
options:
|
||||
- label: "testnet"
|
||||
value: "testnet"
|
||||
- label: "testnet-perf"
|
||||
value: "testnet-perf"
|
||||
- label: "testnet-master"
|
||||
value: "testnet-master"
|
||||
- label: "testnet-master-perf"
|
||||
value: "testnet-master-perf"
|
||||
- label: "testnet-edge"
|
||||
value: "testnet-edge"
|
||||
- label: "testnet-edge-perf"
|
||||
value: "testnet-edge-perf"
|
||||
- label: "testnet-beta"
|
||||
value: "testnet-beta"
|
||||
- label: "testnet-beta-perf"
|
||||
value: "testnet-beta-perf"
|
||||
- select: "Operation"
|
||||
key: "testnet-operation"
|
||||
default: "sanity-or-restart"
|
||||
options:
|
||||
- label: "Sanity check. Restart network on failure"
|
||||
value: "sanity-or-restart"
|
||||
- label: "Start (or restart) the network"
|
||||
value: "start"
|
||||
- label: "Stop the network"
|
||||
value: "stop"
|
||||
- label: "Sanity check only"
|
||||
value: "sanity"
|
||||
- command: "ci/$(basename "$0")"
|
||||
agents:
|
||||
- "queue=$BUILDKITE_AGENT_META_DATA_QUEUE"
|
||||
EOF
|
||||
) | buildkite-agent pipeline upload
|
||||
exit 0
|
||||
fi
|
||||
|
||||
export SOLANA_METRICS_CONFIG="db=$TESTNET,$SOLANA_METRICS_PARTIAL_CONFIG"
|
||||
echo "SOLANA_METRICS_CONFIG: $SOLANA_METRICS_CONFIG"
|
||||
|
||||
ci/channel-info.sh
|
||||
eval "$(ci/channel-info.sh)"
|
||||
|
||||
case $TESTNET in
|
||||
testnet-edge|testnet-edge-perf|testnet-master|testnet-master-perf)
|
||||
CHANNEL_OR_TAG=edge
|
||||
CHANNEL_BRANCH=$EDGE_CHANNEL
|
||||
;;
|
||||
testnet-beta|testnet-beta-perf)
|
||||
CHANNEL_OR_TAG=beta
|
||||
CHANNEL_BRANCH=$BETA_CHANNEL
|
||||
;;
|
||||
testnet|testnet-perf)
|
||||
if [[ -n $BETA_CHANNEL_LATEST_TAG ]]; then
|
||||
CHANNEL_OR_TAG=$BETA_CHANNEL_LATEST_TAG
|
||||
else
|
||||
CHANNEL_OR_TAG=$STABLE_CHANNEL_LATEST_TAG
|
||||
fi
|
||||
CHANNEL_BRANCH=$BETA_CHANNEL
|
||||
;;
|
||||
*)
|
||||
echo "Error: Invalid TESTNET=$TESTNET"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ $BUILDKITE_BRANCH != "$CHANNEL_BRANCH" ]]; then
|
||||
(
|
||||
cat <<EOF
|
||||
steps:
|
||||
- trigger: "$BUILDKITE_PIPELINE_SLUG"
|
||||
async: true
|
||||
build:
|
||||
message: "$BUILDKITE_MESSAGE"
|
||||
branch: "$CHANNEL_BRANCH"
|
||||
env:
|
||||
TESTNET: "$TESTNET"
|
||||
TESTNET_OP: "$TESTNET_OP"
|
||||
EOF
|
||||
) | buildkite-agent pipeline upload
|
||||
exit 0
|
||||
fi
|
||||
|
||||
|
||||
sanity() {
|
||||
echo "--- sanity $TESTNET"
|
||||
case $TESTNET in
|
||||
testnet-edge)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-sanity.sh edge-testnet-solana-com ec2 us-west-1a
|
||||
)
|
||||
;;
|
||||
testnet-edge-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export REJECT_EXTRA_NODES=1
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-sanity.sh edge-perf-testnet-solana-com ec2 us-west-2b
|
||||
)
|
||||
;;
|
||||
testnet-beta)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-sanity.sh beta-testnet-solana-com ec2 us-west-1a
|
||||
)
|
||||
;;
|
||||
testnet-beta-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export REJECT_EXTRA_NODES=1
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-sanity.sh beta-perf-testnet-solana-com ec2 us-west-2b
|
||||
)
|
||||
;;
|
||||
testnet-master)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-sanity.sh master-testnet-solana-com gce us-west1-b
|
||||
)
|
||||
;;
|
||||
testnet-master-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export REJECT_EXTRA_NODES=1
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-sanity.sh master-perf-testnet-solana-com gce us-west1-b
|
||||
)
|
||||
;;
|
||||
testnet)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
#ci/testnet-sanity.sh testnet-solana-com gce us-east1-c
|
||||
ci/testnet-sanity.sh testnet-solana-com ec2 us-west-1a
|
||||
)
|
||||
;;
|
||||
testnet-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export REJECT_EXTRA_NODES=1
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
#ci/testnet-sanity.sh perf-testnet-solana-com ec2 us-east-1a
|
||||
ci/testnet-sanity.sh perf-testnet-solana-com gce us-west1-b
|
||||
)
|
||||
;;
|
||||
*)
|
||||
echo "Error: Invalid TESTNET=$TESTNET"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
|
||||
start() {
|
||||
declare maybeDelete=$1
|
||||
if [[ -z $maybeDelete ]]; then
|
||||
echo "--- start $TESTNET"
|
||||
else
|
||||
echo "--- stop $TESTNET"
|
||||
fi
|
||||
|
||||
case $TESTNET in
|
||||
testnet-edge)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh edge-testnet-solana-com ec2 us-west-1a \
|
||||
-s "$CHANNEL_OR_TAG" -n 3 -c 0 -P -a eipalloc-0ccd4f2239886fa94 \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet-edge-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh edge-perf-testnet-solana-com ec2 us-west-2b \
|
||||
-g -t "$CHANNEL_OR_TAG" -c 2 \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet-beta)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh beta-testnet-solana-com ec2 us-west-1a \
|
||||
-t "$CHANNEL_OR_TAG" -n 3 -c 0 -P -a eipalloc-0f286cf8a0771ce35 \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet-beta-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh beta-perf-testnet-solana-com ec2 us-west-2b \
|
||||
-g -t "$CHANNEL_OR_TAG" -c 2 \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet-master)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh master-testnet-solana-com gce us-west1-b \
|
||||
-s "$CHANNEL_OR_TAG" -n 3 -c 0 -P -a master-testnet-solana-com \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet-master-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh master-perf-testnet-solana-com gce us-west1-b \
|
||||
-G "n1-standard-16 --accelerator count=2,type=nvidia-tesla-v100" \
|
||||
-t "$CHANNEL_OR_TAG" -c 2 \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
#ci/testnet-deploy.sh testnet-solana-com gce us-east1-c \
|
||||
# -s "$CHANNEL_OR_TAG" -n 3 -c 0 -P -a testnet-solana-com \
|
||||
# ${maybeDelete:+-d}
|
||||
ci/testnet-deploy.sh testnet-solana-com ec2 us-west-1a \
|
||||
-t "$CHANNEL_OR_TAG" -n 3 -c 0 -P -a eipalloc-0fa502bf95f6f18b2 \
|
||||
${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
testnet-perf)
|
||||
# shellcheck disable=2030
|
||||
# shellcheck disable=2031
|
||||
(
|
||||
set -ex
|
||||
export NO_LEDGER_VERIFY=1
|
||||
export NO_VALIDATOR_SANITY=1
|
||||
ci/testnet-deploy.sh perf-testnet-solana-com gce us-west1-b \
|
||||
-G "n1-standard-16 --accelerator count=2,type=nvidia-tesla-v100" \
|
||||
-t "$CHANNEL_OR_TAG" -c 2 \
|
||||
${maybeDelete:+-d}
|
||||
#ci/testnet-deploy.sh perf-testnet-solana-com ec2 us-east-1a \
|
||||
# -g \
|
||||
# -t "$CHANNEL_OR_TAG" -c 2 \
|
||||
# ${maybeDelete:+-d}
|
||||
)
|
||||
;;
|
||||
*)
|
||||
echo "Error: Invalid TESTNET=$TESTNET"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
stop() {
|
||||
start delete
|
||||
}
|
||||
|
||||
case $TESTNET_OP in
|
||||
sanity)
|
||||
sanity
|
||||
;;
|
||||
start)
|
||||
start
|
||||
;;
|
||||
stop)
|
||||
stop
|
||||
;;
|
||||
sanity-or-restart)
|
||||
if sanity; then
|
||||
echo Pass
|
||||
else
|
||||
echo "Sanity failed, restarting the network"
|
||||
echo "^^^ +++"
|
||||
start
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
echo --- fin
|
||||
exit 0
|
@@ -9,12 +9,13 @@ usage() {
     echo "Error: $*"
   fi
   cat <<EOF
-usage: $0 [name] [zone]
+usage: $0 [name] [cloud] [zone]

 Sanity check a CD testnet

 name  - name of the network
-zone  - zone of the network
+cloud - cloud provider to use (gce, ec2)
+zone  - cloud provider zone of the network

 Note: the SOLANA_METRICS_CONFIG environment variable is used to configure
       metrics
@@ -23,13 +24,15 @@ EOF
 }

 netName=$1
-zone=$2
+cloudProvider=$2
+zone=$3
 [[ -n $netName ]] || usage ""
+[[ -n $cloudProvider ]] || usage "Cloud provider not specified"
 [[ -n $zone ]] || usage "Zone not specified"

 set -x
-echo --- gce.sh config
-net/gce.sh config -p "$netName" -z "$zone"
+echo "--- $cloudProvider.sh config"
+net/"$cloudProvider".sh config -p "$netName" -z "$zone"
 net/init-metrics.sh -e
 echo --- net.sh sanity
 net/net.sh sanity \
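Under the new argument order a sanity check names the network, the cloud provider, and the zone, in that order. For example (this exact invocation appears in ci/testnet-manager.sh above; the NO_* environment variables are the optional switches used there):

```bash
export NO_LEDGER_VERIFY=1
export NO_VALIDATOR_SANITY=1
ci/testnet-sanity.sh edge-testnet-solana-com ec2 us-west-1a
```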
@@ -4,14 +4,18 @@ Currently we have three testnets:
 * `testnet` - public beta channel testnet accessible via testnet.solana.com. Runs 24/7
 * `testnet-perf` - private beta channel testnet with clients trying to flood the network
   with transactions until failure. Runs 24/7
-* `testnet-master` - private edge channel testnet with clients trying to flood the network
+* `testnet-master` - public edge channel testnet accessible via master.testnet.solana.com. Runs 24/7
+* `testnet-master-perf` - private edge channel testnet with clients trying to flood the network
   with transactions until failure. Runs on weekday mornings for a couple hours

 ## Deploy process

-They are deployed with the `ci/testnet-deploy.sh` script. There is a scheduled buildkite job which runs to do the deploy,
-look at `testnet-deploy` to see the agent which ran it and the logs. There is also a manual job to do the deploy manually.
-Validators are selected based on their machine name and everyone gets the binaries installed from snap.
+They are deployed with the `ci/testnet-manager.sh` script through a list of [scheduled
+buildkite jobs](https://buildkite.com/solana-labs/testnet-management/settings/schedules).
+Each testnet can be manually manipulated from buildkite as well. The `-perf`
+testnets use a release tarball while the non`-perf` builds use the snap build
+(we've observed that the snap build runs slower than a tarball but this has yet
+to be root caused).

 ## Where are the testnet logs?

@@ -29,7 +33,8 @@ $ net/ssh.sh
 for log location details

 ## How do I reset the testnet?
-Manually trigger the [testnet-deploy](https://buildkite.com/solana-labs/testnet-deploy/) pipeline
+Manually trigger the [testnet-management](https://buildkite.com/solana-labs/testnet-management) pipeline
 and when prompted select the desired testnet

 ## How can I scale the tx generation rate?

@@ -43,5 +48,5 @@ Currently, a merged PR is the only way to test a change on the testnet. But you
 can run your own testnet using the scripts in the `net/` directory.

 ## Adjusting the number of clients or validators on the testnet
-Through the [testnet-deploy](https://buildkite.com/solana-labs/testnet-deploy/) settings.
+Edit `ci/testnet-manager.sh`
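The `net/` scripts mentioned above can also be driven by hand. A minimal sketch, assuming a GCE project is already configured and using placeholder name and zone values; the flags are the ones defined in net/gce.sh and net/net.sh in this change set:

```bash
# create a small network: 3 validators, 1 client machine, public IPs
net/gce.sh create -p my-testnet -z us-west1-b -n 3 -c 1 -P

net/init-metrics.sh -e          # optional: point the nodes at a metrics database
net/net.sh start -t edge        # deploy the latest edge release tarball
net/net.sh sanity               # basic health check

# tear everything down when done
net/gce.sh delete -p my-testnet -z us-west1-b
```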
@@ -49,8 +49,6 @@ elif [[ -n $USE_INSTALL ]]; then # Assume |cargo install| was run
     declare program="$1"
     printf "solana-%s" "$program"
   }
-  # CUDA was/wasn't selected at build time, can't affect CUDA state here
-  unset SOLANA_CUDA
 else
   solana_program() {
     declare program="$1"
net/gce.sh (67 changed lines)

@@ -11,7 +11,6 @@ gce)
   # shellcheck source=net/scripts/gce-provider.sh
   source "$here"/scripts/gce-provider.sh

-  imageName="ubuntu-16-04-cuda-9-2-new"
   cpuLeaderMachineType=n1-standard-16
   gpuLeaderMachineType="$cpuLeaderMachineType --accelerator count=4,type=nvidia-tesla-k80"
   leaderMachineType=$cpuLeaderMachineType
@@ -22,12 +21,11 @@ ec2)
   # shellcheck source=net/scripts/ec2-provider.sh
   source "$here"/scripts/ec2-provider.sh

-  imageName="ami-0466e26ccc0e752c1"
   cpuLeaderMachineType=m4.4xlarge
   gpuLeaderMachineType=p2.xlarge
   leaderMachineType=$cpuLeaderMachineType
-  validatorMachineType=m4.xlarge
-  clientMachineType=m4.4xlarge
+  validatorMachineType=m4.2xlarge
+  clientMachineType=m4.2xlarge
   ;;
 *)
   echo "Error: Unknown cloud provider: $cloudProvider"
@@ -118,7 +116,7 @@ while getopts "h?p:Pn:c:z:gG:a:d:" opt; do
   ;;
 g)
   enableGpu=true
-  leaderMachineType="$gpuLeaderMachineType"
+  leaderMachineType=$gpuLeaderMachineType
   ;;
 G)
   enableGpu=true
@@ -131,14 +129,53 @@ while getopts "h?p:Pn:c:z:gG:a:d:" opt; do
   bootDiskType=$OPTARG
   ;;
 *)
-  usage "Error: unhandled option: $opt"
+  usage "unhandled option: $opt"
   ;;
 esac
 done
 shift $((OPTIND - 1))

 [[ -z $1 ]] || usage "Unexpected argument: $1"
-sshPrivateKey="$netConfigDir/id_$prefix"
+if [[ $cloudProvider = ec2 ]]; then
+  # EC2 keys can't be retrieved from running instances like GCE keys can so save
+  # EC2 keys in the user's home directory so |./ec2.sh config| can at least be
+  # used on the same host that ran |./ec2.sh create| .
+  sshPrivateKey="$HOME/.ssh/solana-net-id_$prefix"
+else
+  sshPrivateKey="$netConfigDir/id_$prefix"
+fi
+
+case $cloudProvider in
+gce)
+  if $enableGpu; then
+    # TODO: GPU image is still 16.04-based pending resolution of
+    # https://github.com/solana-labs/solana/issues/1702
+    imageName="ubuntu-16-04-cuda-9-2-new"
+  else
+    imageName="ubuntu-1804-bionic-v20181029 --image-project ubuntu-os-cloud"
+  fi
+  ;;
+ec2)
+  # Deep Learning AMI (Ubuntu 16.04-based)
+  case $region in # (region global variable is set by cloud_SetZone)
+  us-east-1)
+    imageName="ami-047daf3f2b162fc35"
+    ;;
+  us-west-1)
+    imageName="ami-08c8c7c4a57a6106d"
+    ;;
+  us-west-2)
+    imageName="ami-0b63040ee445728bf"
+    ;;
+  *)
+    usage "Unsupported region: $region"
+    ;;
+  esac
+  ;;
+*)
+  echo "Error: Unknown cloud provider: $cloudProvider"
+  ;;
+esac
+

 # cloud_ForEachInstance [cmd] [extra args to cmd]
@@ -206,13 +243,18 @@ EOF

   echo "Waiting for $name to finish booting..."
   (
-    for i in $(seq 1 30); do
-      if (set -x; ssh "${sshOptions[@]}" "$publicIp" "test -f /.instance-startup-complete"); then
-        break
+    set -x +e
+    for i in $(seq 1 60); do
+      timeout 20s ssh "${sshOptions[@]}" "$publicIp" "ls -l /.instance-startup-complete"
+      ret=$?
+      if [[ $ret -eq 0 ]]; then
+        exit 0
       fi
       sleep 2
       echo "Retry $i..."
     done
+    echo "$name failed to boot."
+    exit 1
   )
   echo "$name has booted."
 }
@@ -230,7 +272,7 @@ EOF
   IFS=: read -r leaderName leaderIp _ < <(echo "${instances[0]}")

   # Try to ping the machine first.
-  timeout 60s bash -c "set -o pipefail; until ping -c 3 $leaderIp | tr - _; do echo .; done"
+  timeout 90s bash -c "set -o pipefail; until ping -c 3 $leaderIp | tr - _; do echo .; done"

   if [[ ! -r $sshPrivateKey ]]; then
     echo "Fetching $sshPrivateKey from $leaderName"
@@ -377,6 +419,9 @@ $(
     install-libssl-compatability.sh \
     install-rsync.sh \
     network-config.sh \
+    remove-docker-interface.sh \
+    update-default-cuda.sh \
+
 )

 cat > /etc/motd <<EOM
net/net.sh (59 changed lines)

@@ -23,11 +23,14 @@ Operate a configured testnet
 restart  - Shortcut for stop then start

 start-specific options:
-  -S [snapFilename]     - Deploy the specified Snap file
-  -s edge|beta|stable   - Deploy the latest Snap on the specified Snap release channel
-  -t edge|beta|stable   - Deploy the latest tarball release for the specified channel
-  -f [cargoFeatures]    - List of |cargo --feaures=| to activate
-                          (ignored if -s or -S is specified)
+  -S [snapFilename]           - Deploy the specified Snap file
+  -s edge|beta|stable         - Deploy the latest Snap on the specified Snap release channel
+  -T [tarFilename]            - Deploy the specified release tarball
+  -t edge|beta|stable|vX.Y.Z  - Deploy the latest tarball release for the
+                                specified release channel (edge|beta|stable) or release tag
+                                (vX.Y.Z)
+  -f [cargoFeatures]          - List of |cargo --feaures=| to activate
+                                (ignored if -s or -S is specified)

 Note: if RUST_LOG is set in the environment it will be propogated into the
       network nodes.
@@ -55,7 +58,7 @@ command=$1
 [[ -n $command ]] || usage
 shift

-while getopts "h?S:s:t:o:f:" opt; do
+while getopts "h?S:s:T:t:o:f:" opt; do
   case $opt in
   h | \?)
     usage
@@ -76,9 +79,14 @@ while getopts "h?S:s:t:o:f:" opt; do
       ;;
     esac
     ;;
+  T)
+    tarballFilename=$OPTARG
+    [[ -f $tarballFilename ]] || usage "Snap not readable: $tarballFilename"
+    deployMethod=tar
+    ;;
   t)
     case $OPTARG in
-    edge|beta|stable)
+    edge|beta|stable|v*)
       releaseChannel=$OPTARG
       deployMethod=tar
       ;;
@@ -198,7 +206,7 @@ startClient() {
     set -x
     startCommon "$ipAddress"
     ssh "${sshOptions[@]}" -f "$ipAddress" \
-      "./solana/net/remote/remote-client.sh $deployMethod $entrypointIp $expectedNodeCount \"$RUST_LOG\""
+      "./solana/net/remote/remote-client.sh $deployMethod $entrypointIp \"$RUST_LOG\""
   ) >> "$logFile" 2>&1 || {
     cat "$logFile"
     echo "^^^ +++"
@@ -213,10 +221,11 @@ sanity() {
   echo "--- Sanity"
   $metricsWriteDatapoint "testnet-deploy net-sanity-begin=1"

+  declare host=$leaderIp # TODO: maybe use ${validatorIpList[0]} ?
   (
     set -x
     # shellcheck disable=SC2029 # remote-client.sh args are expanded on client side intentionally
-    ssh "${sshOptions[@]}" "$leaderIp" \
+    ssh "${sshOptions[@]}" "$host" \
       "./solana/net/remote/remote-sanity.sh $sanityExtraArgs"
   ) || ok=false

@@ -236,13 +245,17 @@ start() {
       set -ex;
       apt-get -qq update;
       apt-get -qq -y install snapd;
-      snap download --channel=$snapChannel solana;
+      until snap download --channel=$snapChannel solana; do
+        sleep 1;
+      done
     "
   )
   else
     (
       cd "$SOLANA_ROOT"
-      snap download --channel="$snapChannel" solana
+      until snap download --channel="$snapChannel" solana; do
+        sleep 1
+      done
     )
   fi
   snapFilename="$(echo "$SOLANA_ROOT"/solana_*.snap)"
@@ -259,8 +272,9 @@ start() {

     set -x
     curl -o solana-release.tar.bz2 http://solana-release.s3.amazonaws.com/"$releaseChannel"/solana-release.tar.bz2
-    tar jxvf solana-release.tar.bz2
+    tarballFilename=solana-release.tar.bz2
   fi
+  tar jxvf $tarballFilename
   ;;
 local)
   build
@@ -313,15 +327,28 @@ start() {
   clientDeployTime=$SECONDS
   $metricsWriteDatapoint "testnet-deploy net-start-complete=1"

-  if [[ $deployMethod = "snap" ]]; then
-    declare networkVersion=unknown
+  declare networkVersion=unknown
+  case $deployMethod in
+  snap)
     IFS=\ read -r _ networkVersion _ < <(
       ssh "${sshOptions[@]}" "$leaderIp" \
         "snap info solana | grep \"^installed:\""
     )
     networkVersion=${networkVersion/0+git./}
-    $metricsWriteDatapoint "testnet-deploy version=\"$networkVersion\""
-  fi
+    ;;
+  tar)
+    networkVersion="$(
+      tail -n1 "$SOLANA_ROOT"/solana-release/version.txt || echo "tar-unknown"
+    )"
+    ;;
+  local)
+    networkVersion="$(git rev-parse HEAD || echo local-unknown)"
+    ;;
+  *)
+    usage "Internal error: invalid deployMethod: $deployMethod"
+    ;;
+  esac
+  $metricsWriteDatapoint "testnet-deploy version=\"${networkVersion:0:9}\""

   echo
   echo "+++ Deployment Successful"
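The new -T and -t options give two ways to deploy a prebuilt tarball. Illustrative invocations only (the tag is an example):

```bash
# fetch and deploy the tarball published for a release tag or channel
net/net.sh start -t v0.10.4
net/net.sh start -t beta

# deploy a tarball that was built locally (e.g. by ci/publish-tarball.sh)
net/net.sh start -T solana-release.tar.bz2
```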
@@ -6,8 +6,7 @@ echo "$(date) | $0 $*" > client.log

 deployMethod="$1"
 entrypointIp="$2"
-numNodes="$3"
-RUST_LOG="$4"
+RUST_LOG="$3"
 export RUST_LOG=${RUST_LOG:-solana=info} # if RUST_LOG is unset, default to info

 missing() {
@@ -17,7 +16,6 @@ missing() {

 [[ -n $deployMethod ]] || missing deployMethod
 [[ -n $entrypointIp ]] || missing entrypointIp
-[[ -n $numNodes ]] || missing numNodes

 source net/common.sh
 loadConfigFile
@@ -58,7 +56,6 @@ clientCommand="\
 $solana_bench_tps \
   --network $entrypointIp:8001 \
   --identity client.json \
-  --num-nodes $numNodes \
   --duration 7500 \
   --sustained \
   --threads $threadCount \
@@ -84,13 +84,18 @@ local|tar)
   export SOLANA_DEFAULT_METRICS_RATE=1

   ./fetch-perf-libs.sh
-  export LD_LIBRARY_PATH="$PWD/target/perf-libs:$LD_LIBRARY_PATH"
+  export LD_LIBRARY_PATH="$PWD/target/perf-libs:/usr/local/cuda/lib64:$LD_LIBRARY_PATH"
+  echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH"

   scripts/oom-monitor.sh > oom-monitor.log 2>&1 &
   scripts/net-stats.sh > net-stats.log 2>&1 &

   case $nodeType in
   leader)
+    if [[ -e /dev/nvidia0 && -x ~/.cargo/bin/solana-fullnode-cuda ]]; then
+      echo Selecting solana-fullnode-cuda
+      export SOLANA_CUDA=1
+    fi
     ./multinode-demo/setup.sh -t leader $setupArgs
     ./multinode-demo/drone.sh > drone.log 2>&1 &
     ./multinode-demo/leader.sh > leader.log 2>&1 &
@@ -98,6 +103,11 @@ local|tar)
   validator)
     net/scripts/rsync-retry.sh -vPrc "$entrypointIp:~/.cargo/bin/solana*" ~/.cargo/bin/

+    if [[ -e /dev/nvidia0 && -x ~/.cargo/bin/solana-fullnode-cuda ]]; then
+      echo Selecting solana-fullnode-cuda
+      export SOLANA_CUDA=1
+    fi
+
     ./multinode-demo/setup.sh -t validator $setupArgs
     ./multinode-demo/validator.sh "$entrypointIp":~/solana "$entrypointIp:8001" >validator.log 2>&1 &
     ;;
|
@ -31,7 +31,7 @@ __cloud_FindInstances() {
|
||||
|
||||
declare name zone publicIp privateIp status
|
||||
while read -r name publicIp privateIp status; do
|
||||
printf "%-30s | publicIp=%-16s privateIp=%s staus=%s\n" "$name" "$publicIp" "$privateIp" "$status"
|
||||
printf "%-30s | publicIp=%-16s privateIp=%s status=%s\n" "$name" "$publicIp" "$privateIp" "$status"
|
||||
|
||||
instances+=("$name:$publicIp:$privateIp")
|
||||
done < <(gcloud compute instances list \
|
||||
@ -128,6 +128,9 @@ cloud_CreateInstances() {
|
||||
--no-restart-on-failure
|
||||
)
|
||||
|
||||
# shellcheck disable=SC2206 # Do not want to quote $imageName as it may contain extra args
|
||||
args+=(--image $imageName)
|
||||
|
||||
# shellcheck disable=SC2206 # Do not want to quote $machineType as it may contain extra args
|
||||
args+=(--machine-type $machineType)
|
||||
if [[ -n $optionalBootDiskSize ]]; then
|
||||
|
net/scripts/remove-docker-interface.sh (new executable file)

@@ -0,0 +1,11 @@
+#!/bin/bash -ex
+#
+# Some instances have docker running and docker0 network interface confuses
+# gossip and airdrops fail. As a workaround for now simply remove the docker0
+# interface
+#
+
+[[ $(uname) = Linux ]] || exit 1
+[[ $USER = root ]] || exit 1
+
+ip link delete docker0 || true
net/scripts/update-default-cuda.sh (new executable file)

@@ -0,0 +1,9 @@
+#!/bin/bash -ex
+#
+# Updates the default cuda symlink to the supported version
+#
+
+[[ $(uname) = Linux ]] || exit 1
+[[ $USER = root ]] || exit 1
+
+ln -sfT /usr/local/cuda-9.2 /usr/local/cuda
@@ -21,7 +21,6 @@ $ sudo apt-get install -y clang-7
 ```

 ### macOS
-
 The following depends on Homebrew, instructions on how to install Homebrew are at https://brew.sh

 Once Homebrew is installed, ensure the latest llvm is installed:
@@ -31,3 +30,34 @@ $ brew install llvm # <- should output “Warning: llvm 7.0.0 is already instal
 $ brew --prefix llvm # <- should output “/usr/local/opt/llvm”
 ```
+
+## Development
+
+### Quick start
+To get started create a `makefile` containing:
+```make
+include path/to/bpf.mk
+```
+and `src/program.c` containing:
+```c
+#include <solana_sdk.h>
+
+bool entrypoint(const uint8_t *input) {
+  SolKeyedAccounts ka[1];
+  uint8_t *data;
+  uint64_t data_len;
+
+  if (!sol_deserialize(buf, ka, SOL_ARRAY_SIZE(ka), NULL, &data, &data_len)) {
+    return false;
+  }
+  print_params(1, ka, data, data_len);
+  return true;
+}
+```
+
+Then run `make` to build `out/program.o`.
+Run `make help` for more details.
+
+### Limitations
+* Programs must be fully contained within a single .c file
+* No libc is available but `solana_sdk.h` provides a minimal set of
+  primitives.
@@ -37,7 +37,6 @@ CC_FLAGS := \
 LLC_FLAGS := \
   -march=bpf \
   -filetype=obj \
-  -function-sections \

 OBJ_DUMP_FLAGS := \
   -color \
@@ -34,36 +34,15 @@ typedef unsigned long int uint64_t;
 typedef enum { false = 0, true } bool;

 /**
- * Built-in helper functions
- * @{
- * The BPF VM makes a limited number of helper functions available to BPF
- * programs. They are resolved at run-time and identified by a function index.
- * Calling any of these functions results in `Call` instruction out of the
- * user's BPF program.
- *
- * The helper functions all follow the same signature:
- *
- *   int helper(uint64_t, uint64_t, uint64_t, uint64_t, uint64_t)
- *
- * The meaning of each argument and return value is dependent on the particular
- * helper function being called.
+ * Helper function that prints a string to stdout
  */
+extern void sol_log(const char*);

 /**
- * Helper function that prints to stdout
- *
- * Prints the hexadecimal representation of each parameter
+ * Helper function that prints a 64 bit values represented in hexadecimal
+ * to stdout
  */
-#define BPF_TRACE_PRINTK_IDX 6
-static int (*sol_print)(
-  uint64_t,
-  uint64_t,
-  uint64_t,
-  uint64_t,
-  uint64_t
-) = (void *)BPF_TRACE_PRINTK_IDX;
-
-/**@}*/
+extern void sol_log_64(uint64_t, uint64_t, uint64_t, uint64_t, uint64_t);

 /**
  * Prefix for all BPF functions
@@ -147,7 +126,7 @@ SOL_FN_PREFIX int sol_memcmp(const void *s1, const void *s2, int n) {
  */
 #define sol_panic() _sol_panic(__LINE__)
 SOL_FN_PREFIX void _sol_panic(uint64_t line) {
-  sol_print(0xFF, 0xFF, 0xFF, 0xFF, line);
+  sol_log_64(0xFF, 0xFF, 0xFF, 0xFF, line);
   uint8_t *pv = (uint8_t *)1;
   *pv = 1;
 }
@@ -241,9 +220,9 @@ SOL_FN_PREFIX bool sol_deserialize(
  *
  * @param key The public key to print
  */
-SOL_FN_PREFIX void sol_print_key(const SolPubkey *key) {
+SOL_FN_PREFIX void sol_log_key(const SolPubkey *key) {
   for (int j = 0; j < sizeof(*key); j++) {
-    sol_print(0, 0, 0, j, key->x[j]);
+    sol_log_64(0, 0, 0, j, key->x[j]);
   }
 }
@@ -252,9 +231,9 @@ SOL_FN_PREFIX void sol_print_key(const SolPubkey *key) {
  *
  * @param array The array to print
  */
-SOL_FN_PREFIX void sol_print_array(const uint8_t *array, int len) {
+SOL_FN_PREFIX void sol_log_array(const uint8_t *array, int len) {
   for (int j = 0; j < len; j++) {
-    sol_print(0, 0, 0, j, array[j]);
+    sol_log_64(0, 0, 0, j, array[j]);
   }
 }
@@ -266,20 +245,20 @@ SOL_FN_PREFIX void sol_print_array(const uint8_t *array, int len) {
  * @param data A pointer to the instruction data to print
  * @param data_len The length in bytes of the instruction data
  */
-SOL_FN_PREFIX void sol_print_params(
+SOL_FN_PREFIX void sol_log_params(
   uint64_t num_ka,
   const SolKeyedAccounts *ka,
   const uint8_t *data,
   uint64_t data_len
 ) {
-  sol_print(0, 0, 0, 0, num_ka);
+  sol_log_64(0, 0, 0, 0, num_ka);
   for (int i = 0; i < num_ka; i++) {
-    sol_print_key(ka[i].key);
-    sol_print(0, 0, 0, 0, *ka[i].tokens);
-    sol_print_array(ka[i].userdata, ka[i].userdata_len);
-    sol_print_key(ka[i].program_id);
+    sol_log_key(ka[i].key);
+    sol_log_64(0, 0, 0, 0, *ka[i].tokens);
+    sol_log_array(ka[i].userdata, ka[i].userdata_len);
+    sol_log_key(ka[i].program_id);
   }
-  sol_print_array(data, data_len);
+  sol_log_array(data, data_len);
 }

 /**@}*/
@@ -299,7 +278,7 @@ SOL_FN_PREFIX void sol_print_params(
  *   if (!sol_deserialize(buf, ka, SOL_ARRAY_SIZE(ka), NULL, &data, &data_len)) {
  *     return false;
  *   }
- *   print_params(1, ka, data, data_len);
+ *   sol_log_params(1, ka, data, data_len);
  *   return true;
  * }
  */
@@ -24,9 +24,9 @@ extern bool entrypoint(const uint8_t *input) {
   if (*ka[0].tokens >= tokens) {
     *ka[0].tokens -= tokens;
     *ka[2].tokens += tokens;
-    // sol_print(0, 0, *ka[0].tokens, *ka[2].tokens, tokens);
+    // sol_log_64(0, 0, *ka[0].tokens, *ka[2].tokens, tokens);
   } else {
-    // sol_print(0, 0, 0xFF, *ka[0].tokens, tokens);
+    // sol_log_64(0, 0, 0xFF, *ka[0].tokens, tokens);
   }
   return true;
 }
@@ -15,11 +15,13 @@ extern bool entrypoint(const uint8_t *input) {
   SolKeyedAccounts ka[NUM_KA];
   const uint8_t *data;
   uint64_t data_len;

+  sol_log("noop");
+
   if (!sol_deserialize(input, ka, NUM_KA, NULL, &data, &data_len)) {
     return false;
   }
-  sol_print_params(NUM_KA, ka, data, data_len);
+  sol_log_params(NUM_KA, ka, data, data_len);

   sol_assert(sizeof(int8_t) == 1);
   sol_assert(sizeof(uint8_t) == 1);
@ -1,231 +0,0 @@
|
||||
/**
|
||||
* @brief TicTacToe Dashboard C-based BPF program
|
||||
*/
|
||||
|
||||
#include <solana_sdk.h>
|
||||
#include "tictactoe.h"
|
||||
|
||||
typedef enum {
|
||||
Result_Ok,
|
||||
Result_Panic,
|
||||
Result_GameInProgress,
|
||||
Result_InvalidArguments,
|
||||
Result_InvalidMove,
|
||||
Result_InvalidUserdata,
|
||||
Result_InvalidTimestamp,
|
||||
Result_NoGame,
|
||||
Result_NotYourTurn,
|
||||
Result_PlayerNotFound,
|
||||
Result_UserdataTooSmall,
|
||||
} Result;
|
||||
|
||||
typedef enum {
|
||||
Command_Init = 0,
|
||||
Command_Join,
|
||||
Command_KeepAlive,
|
||||
Command_Move,
|
||||
} Command;
|
||||
|
||||
SOL_FN_PREFIX void game_dump_board(Game *self) {
|
||||
sol_print(0x9, 0x9, 0x9, 0x9, 0x9);
|
||||
sol_print(0, 0, self->board[0], self->board[1], self->board[2]);
|
||||
sol_print(0, 0, self->board[3], self->board[4], self->board[5]);
|
||||
sol_print(0, 0, self->board[6], self->board[7], self->board[8]);
|
||||
sol_print(0x9, 0x9, 0x9, 0x9, 0x9);
|
||||
}
|
||||
|
||||
SOL_FN_PREFIX void game_create(Game *self, SolPubkey *player_x) {
|
||||
// account memory is zero-initialized
|
||||
sol_memcpy(self->player_x.x, player_x, SIZE_PUBKEY);
|
||||
self->state = State_Waiting;
|
||||
for (int i = 0; i < 9; i++) {
|
||||
self->board[i] = BoardItem_F;
|
||||
}
|
||||
}
|
||||
|
||||
SOL_FN_PREFIX Result game_join(Game *self, SolPubkey *player_o,
|
||||
int64_t timestamp) {
|
||||
if (self->state == State_Waiting) {
|
||||
sol_memcpy(self->player_o.x, player_o, SIZE_PUBKEY);
|
||||
self->state = State_XMove;
|
||||
|
||||
if (timestamp <= self->keep_alive[1]) {
|
||||
return Result_InvalidTimestamp;
|
||||
} else {
|
||||
self->keep_alive[1] = timestamp;
|
||||
return Result_Ok;
|
||||
}
|
||||
}
|
||||
return Result_GameInProgress;
|
||||
}
|
||||
|
||||
SOL_FN_PREFIX bool game_same(BoardItem x_or_o, BoardItem one, BoardItem two,
|
||||
BoardItem three) {
|
||||
if (x_or_o == one && x_or_o == two && x_or_o == three) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
SOL_FN_PREFIX bool game_same_player(SolPubkey *one, SolPubkey *two) {
|
||||
for (int i = 0; i < SIZE_PUBKEY; i++) {
|
||||
if (one->x[i] != two->x[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
SOL_FN_PREFIX Result game_next_move(Game *self, SolPubkey *player, int x,
|
||||
int y) {
|
||||
int board_index = y * 3 + x;
|
||||
if (board_index >= 9 || self->board[board_index] != BoardItem_F) {
|
||||
return Result_InvalidMove;
|
||||
}
|
||||
|
||||
BoardItem x_or_o;
|
||||
State won_state;
|
||||
|
||||
switch (self->state) {
|
||||
case State_XMove:
|
||||
if (!game_same_player(player, &self->player_x)) {
|
||||
return Result_PlayerNotFound;
|
||||
}
|
||||
self->state = State_OMove;
|
||||
x_or_o = BoardItem_X;
|
||||
won_state = State_XWon;
|
||||
break;
|
||||
|
||||
case State_OMove:
|
||||
if (!game_same_player(player, &self->player_o)) {
|
||||
return Result_PlayerNotFound;
|
||||
}
|
||||
self->state = State_XMove;
|
||||
x_or_o = BoardItem_O;
|
||||
won_state = State_OWon;
|
||||
break;
|
||||
|
||||
default:
|
||||
return Result_NotYourTurn;
|
||||
}
|
||||
|
||||
self->board[board_index] = x_or_o;
|
||||
|
||||
// game_dump_board(self);
|
||||
|
||||
bool winner =
|
||||
// Check rows
|
||||
game_same(x_or_o, self->board[0], self->board[1], self->board[2]) ||
|
||||
game_same(x_or_o, self->board[3], self->board[4], self->board[5]) ||
|
||||
game_same(x_or_o, self->board[6], self->board[7], self->board[8]) ||
|
||||
// Check columns
|
||||
game_same(x_or_o, self->board[0], self->board[3], self->board[6]) ||
|
||||
game_same(x_or_o, self->board[1], self->board[4], self->board[7]) ||
|
||||
game_same(x_or_o, self->board[2], self->board[5], self->board[8]) ||
|
||||
// Check both diagonals
|
||||
game_same(x_or_o, self->board[0], self->board[4], self->board[8]) ||
|
||||
game_same(x_or_o, self->board[2], self->board[4], self->board[6]);
|
||||
|
||||
if (winner) {
|
||||
self->state = won_state;
|
||||
}
|
||||
|
||||
{
|
||||
int draw = true;
|
||||
for (int i = 0; i < 9; i++) {
|
||||
if (BoardItem_F == self->board[i]) {
|
||||
draw = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (draw) {
|
||||
self->state = State_Draw;
|
||||
}
|
||||
}
|
||||
return Result_Ok;
|
||||
}
|
||||
|
||||
SOL_FN_PREFIX Result game_keep_alive(Game *self, SolPubkey *player,
|
||||
int64_t timestamp) {
|
||||
switch (self->state) {
|
||||
case State_Waiting:
|
||||
case State_XMove:
|
||||
case State_OMove:
|
||||
if (game_same_player(player, &self->player_x)) {
|
||||
if (timestamp <= self->keep_alive[0]) {
|
||||
return Result_InvalidTimestamp;
|
||||
}
|
||||
self->keep_alive[0] = timestamp;
|
||||
} else if (game_same_player(player, &self->player_o)) {
|
||||
if (timestamp <= self->keep_alive[1]) {
|
||||
return Result_InvalidTimestamp;
|
||||
}
|
||||
self->keep_alive[1] = timestamp;
|
||||
} else {
|
||||
return Result_PlayerNotFound;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return Result_Ok;
|
||||
}
|
||||
|
||||
/**
|
||||
* Number of SolKeyedAccounts expected. The program should bail if an
|
||||
* unexpected number of accounts are passed to the program's entrypoint
|
||||
*
|
||||
* accounts[0] On Init must be player X, after that doesn't matter,
|
||||
* anybody can cause a dashboard update
|
||||
* accounts[1] must be a TicTacToe state account
|
||||
* accounts[2] must be account of current player, only Pubkey is used
|
||||
*/
|
||||
#define NUM_KA 3
|
||||
|
||||
extern bool entrypoint(const uint8_t *input) {
|
||||
SolKeyedAccounts ka[NUM_KA];
|
||||
const uint8_t *data;
|
||||
uint64_t data_len;
|
||||
int err = 0;
|
||||
|
||||
if (!sol_deserialize(input, ka, NUM_KA, NULL, &data, &data_len)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (sizeof(Game) > ka[1].userdata_len) {
|
||||
sol_print(0, 0, 0xFF, sizeof(Game), ka[2].userdata_len);
|
||||
return false;
|
||||
}
|
||||
Game game;
|
||||
sol_memcpy(&game, ka[1].userdata, sizeof(game));
|
||||
|
||||
Command command = *data;
|
||||
switch (command) {
|
||||
case Command_Init:
|
||||
game_create(&game, ka[2].key);
|
||||
break;
|
||||
|
||||
case Command_Join:
|
||||
err = game_join(&game, ka[2].key, *((int64_t *)(data + 4)));
|
||||
break;
|
||||
|
||||
case Command_KeepAlive:
|
||||
err = game_keep_alive(&game, ka[2].key, /*TODO*/ 0);
|
||||
break;
|
||||
|
||||
case Command_Move:
|
||||
err = game_next_move(&game, ka[2].key, data[4], data[5]);
|
||||
break;
|
||||
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
|
||||
sol_memcpy(ka[1].userdata, &game, sizeof(game));
|
||||
sol_print(0, 0, 0, err, game.state);
|
||||
if (Result_Ok != err) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
@@ -1,36 +0,0 @@
-#ifndef TICTACTOE_H
-#define TICTACTOE_H
-/**
- * @brief Definitions common to tictactoe and tictactoe_dashboard
- */
-
-typedef enum {
-  State_Waiting,
-  State_XMove,
-  State_OMove,
-  State_XWon,
-  State_OWon,
-  State_Draw,
-} State;
-
-typedef enum { BoardItem_F, BoardItem_X, BoardItem_O } BoardItem;
-
-/**
- * Game state
- *
- * This structure is stored in the owner's account userdata
- *
- * Board Coordinates
- * | 0,0 | 1,0 | 2,0 |
- * | 0,1 | 1,1 | 2,1 |
- * | 0,2 | 1,2 | 2,2 |
- */
-typedef struct {
-  SolPubkey player_x;    /** Player who initialized the game */
-  SolPubkey player_o;    /** Player who joined the game */
-  State state;           /** Current state of the game */
-  BoardItem board[9];    /** Tracks the player moves */
-  int64_t keep_alive[2]; /** Keep Alive for each player */
-} Game;
-
-#endif // TICTACTOE_H
@ -1,98 +0,0 @@
|
||||
/**
|
||||
* @brief TicTacToe C-based BPF program
|
||||
*/
|
||||
|
||||
#include <solana_sdk.h>
|
||||
#include "tictactoe.h"
|
||||
|
||||
#define MAX_GAMES_TRACKED 5
|
||||
|
||||
/**
|
||||
* Dashboard state
|
||||
*
|
||||
* This structure is stored in the owner's account userdata
|
||||
*/
|
||||
typedef struct {
|
||||
SolPubkey pending; /** Latest pending game */
|
||||
SolPubkey completed[MAX_GAMES_TRACKED]; /** Last N completed games (0 is the
|
||||
latest) */
|
||||
uint32_t latest_game; /** Index into completed pointing to latest game completed */
|
||||
uint32_t total; /** Total number of completed games */
|
||||
} Dashboard;
|
||||
|
||||
SOL_FN_PREFIX bool update(Dashboard *self, Game *game, SolPubkey *game_pubkey) {
|
||||
switch (game->state) {
|
||||
case State_Waiting:
|
||||
sol_memcpy(&self->pending, game_pubkey, SIZE_PUBKEY);
|
||||
break;
|
||||
case State_XMove:
|
||||
case State_OMove:
|
||||
// Nothing to do. In progress games are not managed by the dashboard
|
||||
break;
|
||||
case State_XWon:
|
||||
case State_OWon:
|
||||
case State_Draw:
|
||||
for (int i = 0; i < MAX_GAMES_TRACKED; i++) {
|
||||
if (SolPubkey_same(&self->completed[i], game_pubkey)) {
|
||||
// TODO: Once the PoH height is exposed to programs, it could be used
|
||||
// to ensure
|
||||
// that old games are not being re-added and causing total to
|
||||
// increment incorrectly.
|
||||
return false;
|
||||
}
|
||||
}
|
||||
self->total += 1;
|
||||
self->latest_game = (self->latest_game + 1) % MAX_GAMES_TRACKED;
|
||||
sol_memcpy(self->completed[self->latest_game].x, game_pubkey,
|
||||
SIZE_PUBKEY);
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Number of SolKeyedAccounts expected. The program should bail if an
|
||||
* unexpected number of accounts are passed to the program's entrypoint
|
||||
*
|
||||
* accounts[0] doesn't matter, anybody can cause a dashboard update
|
||||
* accounts[1] must be a Dashboard account
|
||||
* accounts[2] must be a Game account
|
||||
*/
|
||||
#define NUM_KA 3
|
||||
|
||||
extern bool entrypoint(const uint8_t *input) {
|
||||
SolKeyedAccounts ka[NUM_KA];
|
||||
const uint8_t *data;
|
||||
uint64_t data_len;
|
||||
int err = 0;
|
||||
|
||||
if (!sol_deserialize(input, ka, NUM_KA, NULL, &data, &data_len)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO check dashboard and game program ids (how to check now that they are
|
||||
// not known values)
|
||||
// TODO check validity of dashboard and game structures contents
|
||||
if (sizeof(Dashboard) > ka[1].userdata_len) {
|
||||
sol_print(0, 0, 0xFF, sizeof(Dashboard), ka[2].userdata_len);
|
||||
return false;
|
||||
}
|
||||
Dashboard dashboard;
|
||||
sol_memcpy(&dashboard, ka[1].userdata, sizeof(dashboard));
|
||||
|
||||
if (sizeof(Game) > ka[2].userdata_len) {
|
||||
sol_print(0, 0, 0xFF, sizeof(Game), ka[2].userdata_len);
|
||||
return false;
|
||||
}
|
||||
Game game;
|
||||
sol_memcpy(&game, ka[2].userdata, sizeof(game));
|
||||
if (true != update(&dashboard, &game, ka[2].key)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
sol_memcpy(ka[1].userdata, &dashboard, sizeof(dashboard));
|
||||
return true;
|
||||
}
|
@@ -1,6 +1,6 @@
 [package]
 name = "solana-bpf-noop"
-version = "0.10.0"
+version = "0.10.4"
 description = "Solana BPF noop program"
 authors = ["Solana Maintainers <maintainers@solana.com>"]
 repository = "https://github.com/solana-labs/solana"
@@ -8,4 +8,4 @@ license = "Apache-2.0"

 [dependencies]
 rbpf = "0.1.0"
-solana-sdk = { path = "../../../../sdk", version = "0.10.0" }
+solana-sdk = { path = "../../../../sdk", version = "0.10.4" }
@@ -1,6 +1,6 @@
 [package]
 name = "solana-bpfloader"
-version = "0.10.0"
+version = "0.10.4"
 description = "Solana BPF Loader"
 authors = ["Solana Maintainers <maintainers@solana.com>"]
 repository = "https://github.com/solana-labs/solana"
@@ -13,10 +13,10 @@ elf = "0.0.10"
 env_logger = "0.5.12"
 libc = "0.2.43"
 log = "0.4.2"
-solana_rbpf = "0.1.2"
+solana_rbpf = "0.1.3"
 serde = "1.0.27"
 serde_derive = "1.0.27"
-solana-sdk = { path = "../../../sdk", version = "0.10.0" }
+solana-sdk = { path = "../../../sdk", version = "0.10.4" }

 [lib]
 name = "solana_bpf_loader"
@ -5,20 +5,24 @@ extern crate byteorder;
|
||||
extern crate env_logger;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
extern crate libc;
|
||||
extern crate solana_rbpf;
|
||||
extern crate solana_sdk;
|
||||
|
||||
use bincode::deserialize;
|
||||
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
|
||||
use solana_rbpf::{helpers, EbpfVmRaw};
|
||||
use libc::c_char;
|
||||
use solana_rbpf::EbpfVmRaw;
|
||||
use solana_sdk::account::KeyedAccount;
|
||||
use solana_sdk::loader_instruction::LoaderInstruction;
|
||||
use solana_sdk::pubkey::Pubkey;
|
||||
use std::ffi::CStr;
|
||||
use std::io::prelude::*;
|
||||
use std::io::Error;
|
||||
use std::io::{Error, ErrorKind};
|
||||
use std::mem;
|
||||
use std::sync::{Once, ONCE_INIT};
|
||||
|
||||
// TODO use rbpf's disassemble
|
||||
#[allow(dead_code)]
|
||||
fn dump_program(key: &Pubkey, prog: &[u8]) {
|
||||
let mut eight_bytes: Vec<u8> = Vec::new();
|
||||
@ -33,32 +37,64 @@ fn dump_program(key: &Pubkey, prog: &[u8]) {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn helper_printf(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) -> u64 {
|
||||
#[allow(unused_variables)]
|
||||
pub fn helper_sol_log_verify(
|
||||
addr: u64,
|
||||
unused2: u64,
|
||||
unused3: u64,
|
||||
unused4: u64,
|
||||
unused5: u64,
|
||||
ro_regions: &[&[u8]],
|
||||
unused7: &[&[u8]],
|
||||
) -> Result<(()), Error> {
|
||||
for region in ro_regions.iter() {
|
||||
if region.as_ptr() as u64 <= addr
|
||||
&& addr as u64 <= region.as_ptr() as u64 + region.len() as u64
|
||||
{
|
||||
let c_buf: *const c_char = addr as *const c_char;
|
||||
let max_size = (region.as_ptr() as u64 + region.len() as u64) - addr;
|
||||
unsafe {
|
||||
for i in 0..max_size {
|
||||
if std::ptr::read(c_buf.offset(i as isize)) == 0 {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
return Err(Error::new(ErrorKind::Other, "Error, Unterminated string"));
|
||||
}
|
||||
}
|
||||
Err(Error::new(
|
||||
ErrorKind::Other,
|
||||
"Error: Load segfault, bad string pointer",
|
||||
))
|
||||
}
|
||||
|
||||
#[allow(unused_variables)]
|
||||
pub fn helper_sol_log(addr: u64, unused2: u64, unused3: u64, unused4: u64, unused5: u64) -> u64 {
|
||||
let c_buf: *const c_char = addr as *const c_char;
|
||||
let c_str: &CStr = unsafe { CStr::from_ptr(c_buf) };
|
||||
match c_str.to_str() {
|
||||
Ok(slice) => info!("sol_log: {:?}", slice),
|
||||
Err(e) => warn!("Error: Cannot print invalid string"),
|
||||
};
|
||||
0
|
||||
}
|
||||
|
||||
pub fn helper_sol_log_u64(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) -> u64 {
|
||||
info!(
|
||||
"bpf_trace_printf: {:#x}, {:#x}, {:#x}, {:#x}, {:#x}",
|
||||
"sol_log_u64: {:#x}, {:#x}, {:#x}, {:#x}, {:#x}",
|
||||
arg1, arg2, arg3, arg4, arg5
|
||||
);
|
||||
let size_arg = |x| {
|
||||
if x == 0 {
|
||||
1
|
||||
} else {
|
||||
(x as f64).log(16.0).floor() as u64 + 1
|
||||
}
|
||||
};
|
||||
"bpf_trace_printf: 0x, 0x, 0x, 0x, 0x\n".len() as u64
|
||||
+ size_arg(arg1)
|
||||
+ size_arg(arg2)
|
||||
+ size_arg(arg3)
|
||||
+ size_arg(arg4)
|
||||
+ size_arg(arg5)
|
||||
0
|
||||
}
|
||||
|
||||
fn create_vm(prog: &[u8]) -> Result<EbpfVmRaw, Error> {
|
||||
let mut vm = EbpfVmRaw::new(None)?;
|
||||
vm.set_verifier(bpf_verifier::check)?;
|
||||
vm.set_max_instruction_count(36000)?; // 36000 is a wag, need to tune
|
||||
vm.set_program(&prog)?;
|
||||
vm.register_helper(helpers::BPF_TRACE_PRINTK_IDX, helper_printf)?;
|
||||
vm.set_elf(&prog)?;
|
||||
vm.register_helper_ex("sol_log", Some(helper_sol_log_verify), helper_sol_log)?;
|
||||
vm.register_helper_ex("sol_log_64", None, helper_sol_log_u64)?;
|
||||
Ok(vm)
|
||||
}
|
||||
|
||||
@ -163,6 +199,8 @@ pub extern "C" fn process(keyed_accounts: &mut [KeyedAccount], tx_data: &[u8]) -
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use solana_rbpf::helpers;
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Error: Execution exceeded maximum number of instructions")]
|
||||
fn test_non_terminating_program() {
|
||||
@ -180,7 +218,13 @@ mod tests {
|
||||
0x95, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // exit
|
||||
];
|
||||
let input = &mut [0x00];
|
||||
let mut vm = create_vm(prog).unwrap();
|
||||
|
||||
let mut vm = EbpfVmRaw::new(None).unwrap();
|
||||
vm.set_verifier(bpf_verifier::check).unwrap();
|
||||
vm.set_max_instruction_count(36000).unwrap(); // 36000 is a wag, need to tune
|
||||
vm.set_program(prog).unwrap();
|
||||
vm.register_helper(helpers::BPF_TRACE_PRINTK_IDX, helpers::bpf_trace_printf)
|
||||
.unwrap();
|
||||
vm.execute_program(input).unwrap();
|
||||
}
|
||||
}
|
||||
|
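In the loader changes above, the single `bpf_trace_printf` helper is replaced by named `sol_log`/`sol_log_64` helpers registered through `register_helper_ex`, each optionally paired with a verifier that runs before the helper body and can reject bad pointers. A rough sketch of that shape, using a hypothetical helper that is not part of this change (only the signatures mirror `helper_sol_log` and `helper_sol_log_verify` above):

```
use std::io::{Error, ErrorKind};

// Hypothetical helper body: receives the five u64 arguments passed by the
// BPF program and returns a u64 result.
pub fn helper_example(arg1: u64, _a2: u64, _a3: u64, _a4: u64, _a5: u64) -> u64 {
    println!("example helper called with {:#x}", arg1);
    0
}

// Hypothetical verifier: given the same arguments plus the memory regions the
// program may read or write, reject the call before the helper body runs.
pub fn helper_example_verify(
    arg1: u64,
    _a2: u64,
    _a3: u64,
    _a4: u64,
    _a5: u64,
    ro_regions: &[&[u8]],
    _rw_regions: &[&[u8]],
) -> Result<(), Error> {
    for region in ro_regions.iter() {
        let start = region.as_ptr() as u64;
        if start <= arg1 && arg1 <= start + region.len() as u64 {
            return Ok(());
        }
    }
    Err(Error::new(ErrorKind::Other, "pointer outside readable regions"))
}

// Registration would mirror create_vm above:
//   vm.register_helper_ex("example", Some(helper_example_verify), helper_example)?;
```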
@ -1,6 +1,6 @@
[package]
name = "solana-lualoader"
version = "0.10.0"
version = "0.10.4"
description = "Solana Lua Loader"
authors = ["Solana Maintainers <maintainers@solana.com>"]
repository = "https://github.com/solana-labs/solana"
@ -13,7 +13,7 @@ log = "0.4.2"
rlua = "0.15.2"
serde = "1.0.27"
serde_derive = "1.0.27"
solana-sdk = { path = "../../../sdk", version = "0.10.0" }
solana-sdk = { path = "../../../sdk", version = "0.10.4" }

[dev-dependencies]
bincode = "1.0.0"

@ -1,13 +1,13 @@
[package]
name = "solana-noop"
version = "0.10.0"
version = "0.10.4"
description = "Solana noop program"
authors = ["Solana Maintainers <maintainers@solana.com>"]
repository = "https://github.com/solana-labs/solana"
license = "Apache-2.0"

[dependencies]
solana-sdk = { path = "../../../sdk", version = "0.10.0" }
solana-sdk = { path = "../../../sdk", version = "0.10.4" }

[lib]
name = "noop"

@ -117,6 +117,14 @@ $ solana-wallet send-timestamp <PUBKEY> <PROCESS_ID> --date 2018-12-24T23:59:00
<TX_SIGNATURE>
```

### Deploy program
```
// Command
$ solana-wallet deploy <PATH>

// Return
<PROGRAM_ID>
```

## Javascript solana-web3.js Interface

@ -1,6 +1,6 @@
[package]
name = "solana-sdk"
version = "0.10.0"
version = "0.10.4"
description = "Solana SDK"
authors = ["Solana Maintainers <maintainers@solana.com>"]
repository = "https://github.com/solana-labs/solana"

@ -7,7 +7,6 @@ use chrono::prelude::*;
use clap::ArgMatches;
use cluster_info::NodeInfo;
use drone::DroneRequest;
use elf;
use fullnode::Config;
use hash::Hash;
use loader_transaction::LoaderTransaction;
@ -31,7 +30,6 @@ use std::{error, fmt, mem};
use system_transaction::SystemTransaction;
use transaction::Transaction;

const PLATFORM_SECTION_C: &str = ".text.entrypoint";
const USERDATA_CHUNK_SIZE: usize = 256;

#[derive(Debug, PartialEq)]
@ -396,16 +394,14 @@ pub fn process_command(config: &WalletConfig) -> Result<String, Box<error::Error

            let last_id = get_last_id(&config)?;
            let program = Keypair::new();
            let program_userdata = elf::File::open_path(program_location)
            let mut program_userdata = Vec::new();
            File::open(program_location)
                .map_err(|_| {
                    WalletError::DynamicProgramError("Could not parse program file".to_string())
                })?.get_section(PLATFORM_SECTION_C)
                .ok_or_else(|| {
                    WalletError::DynamicProgramError(
                        "Could not find entrypoint in program file".to_string(),
                    )
                })?.data
                .clone();
                })?.read_to_end(&mut program_userdata)
                .map_err(|_| {
                    WalletError::DynamicProgramError("Could not read program file".to_string())
                })?;

            let tx = Transaction::system_create(
                &config.id,
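The wallet's `deploy` path above stops extracting the `.text.entrypoint` ELF section and instead sends the whole program file, split into `USERDATA_CHUNK_SIZE` (256-byte) pieces via loader transactions. A minimal standalone sketch of that read-and-chunk step (not the wallet's actual implementation; only the chunk size is taken from the constant in this diff):

```
use std::fs::File;
use std::io::Read;

const USERDATA_CHUNK_SIZE: usize = 256;

// Read the program's ELF bytes and split them into chunks small enough to
// ship in individual loader transactions.
fn program_chunks(path: &str) -> std::io::Result<Vec<Vec<u8>>> {
    let mut bytes = Vec::new();
    File::open(path)?.read_to_end(&mut bytes)?;
    Ok(bytes
        .chunks(USERDATA_CHUNK_SIZE)
        .map(|chunk| chunk.to_vec())
        .collect())
}
```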
@ -1,5 +1,6 @@
extern crate bincode;
extern crate elf;
extern crate serde_derive;
extern crate solana;
extern crate solana_sdk;

@ -13,13 +14,15 @@ use solana::mint::Mint;
use solana::native_loader;
use solana::signature::{Keypair, KeypairUtil};
use solana::system_transaction::SystemTransaction;
#[cfg(feature = "bpf_c")]
use solana::tictactoe_program::Command;
use solana::transaction::Transaction;
use solana_sdk::pubkey::Pubkey;
#[cfg(feature = "bpf_c")]
use std::env;
#[cfg(feature = "bpf_c")]
use std::fs::File;
#[cfg(feature = "bpf_c")]
use std::io::Read;
#[cfg(feature = "bpf_c")]
use std::path::PathBuf;

/// BPF program file extension
@ -119,7 +122,7 @@ struct Program {
}

impl Program {
    pub fn new(loader: &Loader, userdata: Vec<u8>) -> Self {
    pub fn new(loader: &Loader, userdata: &Vec<u8>) -> Self {
        let program = Keypair::new();

        // allocate, populate, finalize and spawn program
@ -183,7 +186,7 @@ fn test_program_native_noop() {
    let loader = Loader::new_native();
    let name = String::from("noop");
    let userdata = name.as_bytes().to_vec();
    let program = Program::new(&loader, userdata);
    let program = Program::new(&loader, &userdata);

    // Call user program
    let tx = Transaction::new(
@ -213,7 +216,7 @@ fn test_program_lua_move_funds() {
        accounts[2].tokens = accounts[2].tokens + tokens
    "#.as_bytes()
        .to_vec();
    let program = Program::new(&loader, userdata);
    let program = Program::new(&loader, &userdata);
    let from = Keypair::new();
    let to = Keypair::new().pubkey();

@ -272,16 +275,12 @@ fn test_program_lua_move_funds() {
fn test_program_builtin_bpf_noop() {
    logger::setup();

    let mut file = File::open(create_bpf_path("noop")).expect("file open failed");
    let mut elf = Vec::new();
    file.read_to_end(&mut elf).unwrap();

    let loader = Loader::new_bpf();
    let program = Program::new(
        &loader,
        elf::File::open_path(&create_bpf_path("noop"))
            .unwrap()
            .get_section(PLATFORM_SECTION_C)
            .unwrap()
            .data
            .clone(),
    );
    let program = Program::new(&loader, &elf);

    // Call user program
    let tx = Transaction::new(
@ -304,16 +303,12 @@ fn test_program_builtin_bpf_noop() {
fn test_program_bpf_noop_c() {
    logger::setup();

    let mut file = File::open(create_bpf_path("noop")).expect("file open failed");
    let mut elf = Vec::new();
    file.read_to_end(&mut elf).unwrap();

    let loader = Loader::new_dynamic("solana_bpf_loader");
    let program = Program::new(
        &loader,
        elf::File::open_path(&create_bpf_path("noop"))
            .unwrap()
            .get_section(PLATFORM_SECTION_C)
            .unwrap()
            .data
            .clone(),
    );
    let program = Program::new(&loader, &elf);

    // Call user program
    let tx = Transaction::new(
@ -330,268 +325,3 @@ fn test_program_bpf_noop_c() {
        loader.bank.process_transactions(&vec![tx.clone()]),
    );
}

#[cfg(feature = "bpf_c")]
struct TicTacToe {
    game: Keypair,
}

#[cfg(feature = "bpf_c")]
impl TicTacToe {
    pub fn new(loader: &Loader, program: &Program) -> Self {
        let game = Keypair::new();

        // Create game account
        let tx = Transaction::system_create(
            &loader.mint.keypair(),
            game.pubkey(),
            loader.mint.last_id(),
            1,
            0x78, // corresponds to the C structure size
            program.program.pubkey(),
            0,
        );
        check_tx_results(
            &loader.bank,
            &tx,
            loader.bank.process_transactions(&vec![tx.clone()]),
        );

        TicTacToe { game }
    }

    pub fn id(&self) -> Pubkey {
        self.game.pubkey().clone()
    }

    pub fn init(&self, loader: &Loader, program: &Program, player: &Pubkey) {
        let userdata = serialize(&Command::Init).unwrap();
        let tx = Transaction::new(
            &self.game,
            &[self.game.pubkey(), *player],
            program.program.pubkey(),
            userdata,
            loader.mint.last_id(),
            0,
        );
        check_tx_results(
            &loader.bank,
            &tx,
            loader.bank.process_transactions(&vec![tx.clone()]),
        );
    }

    pub fn command(&self, loader: &Loader, program: &Program, command: Command, player: &Pubkey) {
        let userdata = serialize(&command).unwrap();
        let tx = Transaction::new(
            &loader.mint.keypair(),
            &[self.game.pubkey(), *player],
            program.program.pubkey(),
            userdata,
            loader.mint.last_id(),
            0,
        );
        check_tx_results(
            &loader.bank,
            &tx,
            loader.bank.process_transactions(&vec![tx.clone()]),
        );
    }

    pub fn get_player_x(&self, loader: &Loader) -> Vec<u8> {
        loader
            .bank
            .get_account(&self.game.pubkey())
            .unwrap()
            .userdata[0..32]
            .to_vec()
    }

    pub fn get_player_y(&self, loader: &Loader) -> Vec<u8> {
        loader
            .bank
            .get_account(&self.game.pubkey())
            .unwrap()
            .userdata[32..64]
            .to_vec()
    }

    pub fn game(&self, loader: &Loader) -> Vec<u8> {
        loader
            .bank
            .get_account(&self.game.pubkey())
            .unwrap()
            .userdata[64..68]
            .to_vec()
    }
}

#[cfg(feature = "bpf_c")]
struct Dashboard {
    dashboard: Keypair,
}

#[cfg(feature = "bpf_c")]
impl Dashboard {
    pub fn new(loader: &Loader, program: &Program) -> Self {
        let dashboard = Keypair::new();

        // Create game account
        let tx = Transaction::system_create(
            &loader.mint.keypair(),
            dashboard.pubkey(),
            loader.mint.last_id(),
            1,
            0xD0, // corresponds to the C structure size
            program.program.pubkey(),
            0,
        );
        check_tx_results(
            &loader.bank,
            &tx,
            loader.bank.process_transactions(&vec![tx.clone()]),
        );

        Dashboard { dashboard }
    }

    pub fn update(&self, loader: &Loader, program: &Program, game: &Pubkey) {
        let tx = Transaction::new(
            &self.dashboard,
            &[self.dashboard.pubkey(), *game],
            program.program.pubkey(),
            vec![],
            loader.mint.last_id(),
            0,
        );
        check_tx_results(
            &loader.bank,
            &tx,
            loader.bank.process_transactions(&vec![tx.clone()]),
        );
    }

    pub fn get_game(&self, loader: &Loader, since_last: usize) -> Vec<u8> {
        let userdata = loader
            .bank
            .get_account(&self.dashboard.pubkey())
            .unwrap()
            .userdata;

        // TODO serialize
        let last_game = userdata[192] as usize;
        let this_game = (last_game + since_last * 4) % 5;
        let start = 32 + this_game * 32;
        let end = start + 32;

        loader
            .bank
            .get_account(&self.dashboard.pubkey())
            .unwrap()
            .userdata[start..end]
            .to_vec()
    }

    pub fn get_pending(&self, loader: &Loader) -> Vec<u8> {
        loader
            .bank
            .get_account(&self.dashboard.pubkey())
            .unwrap()
            .userdata[0..32]
            .to_vec()
    }
}
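An aside on the `get_game` indexing above (inferred from these accessors; the dashboard's byte layout is not documented in this diff): byte 192 appears to hold the ring slot of the most recently completed game, the five 32-byte game slots start at byte 32, and adding `since_last * 4` modulo 5 steps backwards through that ring. A small standalone sketch of the arithmetic:

```
// Hypothetical helper reproducing Dashboard::get_game's index math.
// `last_game` is the most recently written ring slot (0..5), `since_last`
// is how many completed games to step back.
fn game_byte_range(last_game: usize, since_last: usize) -> (usize, usize) {
    // Adding 4 modulo 5 is the same as subtracting 1, so this walks the
    // five-slot ring backwards.
    let this_game = (last_game + since_last * 4) % 5;
    let start = 32 + this_game * 32; // each slot holds a 32-byte pubkey
    (start, start + 32)
}

fn main() {
    assert_eq!(game_byte_range(2, 0), (96, 128)); // most recent game
    assert_eq!(game_byte_range(2, 1), (64, 96)); // the one before it
}
```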
#[cfg(feature = "bpf_c")]
#[test]
fn test_program_bpf_tictactoe_c() {
    logger::setup();

    let loader = Loader::new_dynamic("solana_bpf_loader");
    let program = Program::new(
        &loader,
        elf::File::open_path(&create_bpf_path("tictactoe"))
            .unwrap()
            .get_section(PLATFORM_SECTION_C)
            .unwrap()
            .data
            .clone(),
    );
    let player_x = Pubkey::new(&[0xA; 32]);
    let player_y = Pubkey::new(&[0xB; 32]);

    let ttt = TicTacToe::new(&loader, &program);
    ttt.init(&loader, &program, &player_x);
    ttt.command(&loader, &program, Command::Join(0xAABBCCDD), &player_y);
    ttt.command(&loader, &program, Command::Move(1, 1), &player_x);
    ttt.command(&loader, &program, Command::Move(0, 0), &player_y);
    ttt.command(&loader, &program, Command::Move(2, 0), &player_x);
    ttt.command(&loader, &program, Command::Move(0, 2), &player_y);
    ttt.command(&loader, &program, Command::Move(2, 2), &player_x);
    ttt.command(&loader, &program, Command::Move(0, 1), &player_y);

    assert_eq!(player_x.as_ref(), &ttt.get_player_x(&loader)[..]); // validate x's key
    assert_eq!(player_y.as_ref(), &ttt.get_player_y(&loader)[..]); // validate o's key
    assert_eq!([4, 0, 0, 0], ttt.game(&loader)[..]); // validate that o won
}

#[cfg(feature = "bpf_c")]
#[test]
fn test_program_bpf_tictactoe_dashboard_c() {
    logger::setup();

    let loader = Loader::new_dynamic("solana_bpf_loader");
    let ttt_program = Program::new(
        &loader,
        elf::File::open_path(&create_bpf_path("tictactoe"))
            .unwrap()
            .get_section(PLATFORM_SECTION_C)
            .unwrap()
            .data
            .clone(),
    );
    let player_x = Pubkey::new(&[0xA; 32]);
    let player_y = Pubkey::new(&[0xB; 32]);

    let ttt1 = TicTacToe::new(&loader, &ttt_program);
    ttt1.init(&loader, &ttt_program, &player_x);
    ttt1.command(&loader, &ttt_program, Command::Join(0xAABBCCDD), &player_y);
    ttt1.command(&loader, &ttt_program, Command::Move(1, 1), &player_x);
    ttt1.command(&loader, &ttt_program, Command::Move(0, 0), &player_y);
    ttt1.command(&loader, &ttt_program, Command::Move(2, 0), &player_x);
    ttt1.command(&loader, &ttt_program, Command::Move(0, 2), &player_y);
    ttt1.command(&loader, &ttt_program, Command::Move(2, 2), &player_x);
    ttt1.command(&loader, &ttt_program, Command::Move(0, 1), &player_y);

    let ttt2 = TicTacToe::new(&loader, &ttt_program);
    ttt2.init(&loader, &ttt_program, &player_x);
    ttt2.command(&loader, &ttt_program, Command::Join(0xAABBCCDD), &player_y);
    ttt2.command(&loader, &ttt_program, Command::Move(1, 1), &player_x);
    ttt2.command(&loader, &ttt_program, Command::Move(0, 0), &player_y);
    ttt2.command(&loader, &ttt_program, Command::Move(2, 0), &player_x);
    ttt2.command(&loader, &ttt_program, Command::Move(0, 2), &player_y);
    ttt2.command(&loader, &ttt_program, Command::Move(2, 2), &player_x);
    ttt2.command(&loader, &ttt_program, Command::Move(0, 1), &player_y);

    let ttt3 = TicTacToe::new(&loader, &ttt_program);
    ttt3.init(&loader, &ttt_program, &player_x);

    let dashboard_program = Program::new(
        &loader,
        elf::File::open_path(&create_bpf_path("tictactoe_dashboard"))
            .unwrap()
            .get_section(PLATFORM_SECTION_C)
            .unwrap()
            .data
            .clone(),
    );
    let dashboard = Dashboard::new(&loader, &dashboard_program);

    dashboard.update(&loader, &dashboard_program, &ttt1.id());
    dashboard.update(&loader, &dashboard_program, &ttt2.id());
    dashboard.update(&loader, &dashboard_program, &ttt3.id());

    assert_eq!(ttt1.id().as_ref(), &dashboard.get_game(&loader, 1)[..]);
    assert_eq!(ttt2.id().as_ref(), &dashboard.get_game(&loader, 0)[..]);
    assert_eq!(ttt3.id().as_ref(), &dashboard.get_pending(&loader)[..]);
}