diff --git a/system-test/testnet-performance/gce-cpu-only-perf-5-node.yml b/system-test/testnet-performance/gce-cpu-only-perf-5-node.yml
new file mode 100755
index 0000000000..c72a9d6e11
--- /dev/null
+++ b/system-test/testnet-performance/gce-cpu-only-perf-5-node.yml
@@ -0,0 +1,16 @@
+steps:
+  - command: "system-test/testnet-performance/testnet-automation.sh"
+    label: "GCE - CPU Only 5 Node"
+    env:
+      UPLOAD_RESULTS_TO_SLACK: "true"
+      CLOUD_PROVIDER: "gce"
+      TESTNET_TAG: "gce-edge-perf-cpu-only"
+      RAMP_UP_TIME: 0
+      TEST_DURATION_SECONDS: 600
+      NUMBER_OF_VALIDATOR_NODES: 5
+      NUMBER_OF_CLIENT_NODES: 2
+      CLIENT_OPTIONS: "bench-tps=2=--tx_count 15000 --thread-batch-sleep-ms 250"
+      TESTNET_ZONES: "us-west1-a,us-west1-b,us-central1-a,europe-west4-a"
+      ADDITIONAL_FLAGS: "--dedicated"
+    agents:
+      - "queue=testnet-deploy"
diff --git a/system-test/testnet-performance/gce-gpu-perf-5-node.yml b/system-test/testnet-performance/gce-gpu-perf-5-node.yml
new file mode 100755
index 0000000000..9b8e63c355
--- /dev/null
+++ b/system-test/testnet-performance/gce-gpu-perf-5-node.yml
@@ -0,0 +1,17 @@
+steps:
+  - command: "system-test/testnet-performance/testnet-automation.sh"
+    label: "GCE - GPU Enabled 5 Nodes"
+    env:
+      UPLOAD_RESULTS_TO_SLACK: "true"
+      CLOUD_PROVIDER: "gce"
+      TESTNET_TAG: "gce-edge-perf-gpu-enabled"
+      RAMP_UP_TIME: 0
+      TEST_DURATION_SECONDS: 600
+      NUMBER_OF_VALIDATOR_NODES: 5
+      VALIDATOR_NODE_MACHINE_TYPE: "--machine-type n1-standard-16 --accelerator count=2,type=nvidia-tesla-v100"
+      NUMBER_OF_CLIENT_NODES: 2
+      CLIENT_OPTIONS: "bench-tps=2=--tx_count 15000 --thread-batch-sleep-ms 250"
+      TESTNET_ZONES: "us-west1-a,us-west1-b,us-central1-a,europe-west4-a"
+      ADDITIONAL_FLAGS: "--dedicated"
+    agents:
+      - "queue=testnet-deploy"
diff --git a/system-test/testnet-performance/testnet-automation-json-parser.py b/system-test/testnet-performance/testnet-automation-json-parser.py
index c1d005c82b..6f8fa9d266 100755
--- a/system-test/testnet-performance/testnet-automation-json-parser.py
+++ b/system-test/testnet-performance/testnet-automation-json-parser.py
@@ -7,5 +7,7 @@ if 'results' in data:
   for result in data['results']:
     if 'series' in result:
       print result['series'][0]['columns'][1].encode() + ': ' + str(result['series'][0]['values'][0][1])
+    else:
+      print "An expected result from CURL request is missing"
 else:
   print "No results returned from CURL request"
diff --git a/system-test/testnet-performance/testnet-automation.sh b/system-test/testnet-performance/testnet-automation.sh
index af2fea0bb9..20c833749e 100755
--- a/system-test/testnet-performance/testnet-automation.sh
+++ b/system-test/testnet-performance/testnet-automation.sh
@@ -1,16 +1,6 @@
 #!/usr/bin/env bash
 set -e
 
-# TODO: Make sure a dB named $TESTNET_TAG exists in the influxDB host, or can be created
-[[ -n $TESTNET_TAG ]] || TESTNET_TAG=testnet-automation
-[[ -n $INFLUX_HOST ]] || INFLUX_HOST=https://metrics.solana.com:8086
-
-# TODO: Remove all default values, force explicitness in the testcase definition
-[[ -n $TEST_DURATION_SECONDS ]] || TEST_DURATION_SECONDS=300
-[[ -n $RAMP_UP_TIME ]] || RAMP_UP_TIME=0
-[[ -n $NUMBER_OF_VALIDATOR_NODES ]] || NUMBER_OF_VALIDATOR_NODES=2
-[[ -n $NUMBER_OF_CLIENT_NODES ]] || NUMBER_OF_CLIENT_NODES=1
-
 function collect_logs {
   echo --- collect logs from remote nodes
   rm -rf net/log
@@ -75,7 +65,7 @@ EOF
 }
 trap cleanup_testnet EXIT
 
-launchTestnet() {
+function launchTestnet() {
   set -x
 
   # shellcheck disable=SC2068
@@ -164,6 +154,26 @@ launchTestnet() {
 
 cd "$(dirname "$0")/../.."
 
+# TODO: Make sure a dB named $TESTNET_TAG exists in the influxDB host, or can be created
+[[ -n $TESTNET_TAG ]] || TESTNET_TAG=testnet-automation
+[[ -n $INFLUX_HOST ]] || INFLUX_HOST=https://metrics.solana.com:8086
+[[ -n $RAMP_UP_TIME ]] || RAMP_UP_TIME=0
+
+if [[ -z $TEST_DURATION_SECONDS ]] ; then
+  echo TEST_DURATION_SECONDS not defined
+  exit 1
+fi
+
+if [[ -z $NUMBER_OF_VALIDATOR_NODES ]] ; then
+  echo NUMBER_OF_VALIDATOR_NODES not defined
+  exit 1
+fi
+
+if [[ -z $NUMBER_OF_CLIENT_NODES ]] ; then
+  echo NUMBER_OF_CLIENT_NODES not defined
+  exit 1
+fi
+
 if [[ -z $SOLANA_METRICS_CONFIG ]]; then
   if [[ -z $SOLANA_METRICS_PARTIAL_CONFIG ]]; then
     echo SOLANA_METRICS_PARTIAL_CONFIG not defined
@@ -188,7 +198,7 @@ maybeMachineType=${VALIDATOR_NODE_MACHINE_TYPE:+"-G"}
 IFS=, read -r -a TESTNET_CLOUD_ZONES <<<"${TESTNET_ZONES}"
 
 RESULT_FILE="$TESTNET_TAG"_SUMMARY_STATS_"$NUMBER_OF_VALIDATOR_NODES".log
-rm -f $RESULT_FILE
+rm -f "$RESULT_FILE"
 
 RESULT_DETAILS="Test failed to finish"
 TEST_PARAMS_TO_DISPLAY=(CLOUD_PROVIDER \