cmd/swarm, swarm/api/http, swarm/bmt, swarm/fuse, swarm/network/stream, swarm/storage, swarm/storage/encryption, swarm/testutil: use pseudo-random instead of crypto-random for test files content generation (#18083)
- Replace "crypto/rand" to "math/rand" for files content generation - Remove swarm/network_test.go.Shuffle and swarm/btm/btm_test.go.Shuffle - because go1.9 support dropped (see https://github.com/ethereum/go-ethereum/pull/17807 and comments to swarm/network_test.go.Shuffle)
committed by Viktor Trón
parent cff97119a7
commit eb8fa3cc89
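For reference, the testutil.RandomBytes and testutil.RandomReader helpers used throughout the diff below produce deterministic, seed-based pseudo-random content. The following is a minimal sketch of how such helpers can be built on top of math/rand; the names and (seed, length) parameters are taken from the diff, but the bodies are an illustrative assumption, not the actual swarm/testutil implementation.

package testutil

import (
	"bytes"
	"io"
	"math/rand"
)

// RandomBytes returns length pseudo-random bytes derived deterministically
// from seed. Sketch only: the real swarm/testutil helper may differ in detail.
func RandomBytes(seed, length int) []byte {
	b := make([]byte, length)
	// math/rand with a fixed seed replaces crypto/rand: test content does not
	// need cryptographic randomness, and a known seed keeps failures reproducible.
	r := rand.New(rand.NewSource(int64(seed)))
	r.Read(b) // (*rand.Rand).Read always fills b and never returns an error
	return b
}

// RandomReader exposes the same data as an io.Reader, mirroring the
// io.LimitReader(rand.Reader, int64(n)) pattern it replaces in the tests.
func RandomReader(seed, length int) io.Reader {
	return bytes.NewReader(RandomBytes(seed, length))
}

Passing a distinct seed per call (for example the benchmark loop index i, as the diff below does) yields different but fully reproducible content on every iteration.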
@@ -19,13 +19,13 @@ package storage
 import (
 	"bytes"
 	"context"
-	"crypto/rand"
 	"encoding/binary"
 	"fmt"
 	"io"
 	"testing"

 	"github.com/ethereum/go-ethereum/crypto/sha3"
+	"github.com/ethereum/go-ethereum/swarm/testutil"
 )

 /*
@@ -47,7 +47,7 @@ func newTestHasherStore(store ChunkStore, hash string) *hasherStore {
 }

 func testRandomBrokenData(n int, tester *chunkerTester) {
-	data := io.LimitReader(rand.Reader, int64(n))
+	data := testutil.RandomReader(1, n)
 	brokendata := brokenLimitReader(data, n, n/2)

 	buf := make([]byte, n)
@@ -56,7 +56,7 @@ func testRandomBrokenData(n int, tester *chunkerTester) {
 		tester.t.Fatalf("Broken reader is not broken, hence broken. Returns: %v", err)
 	}

-	data = io.LimitReader(rand.Reader, int64(n))
+	data = testutil.RandomReader(2, n)
 	brokendata = brokenLimitReader(data, n, n/2)

 	putGetter := newTestHasherStore(NewMapChunkStore(), SHA3Hash)
@@ -77,7 +77,8 @@ func testRandomData(usePyramid bool, hash string, n int, tester *chunkerTester)
 	input, found := tester.inputs[uint64(n)]
 	var data io.Reader
 	if !found {
-		data, input = GenerateRandomData(n)
+		input = testutil.RandomBytes(1, n)
+		data = bytes.NewReader(input)
 		tester.inputs[uint64(n)] = input
 	} else {
 		data = io.LimitReader(bytes.NewReader(input), int64(n))
@@ -118,14 +119,13 @@ func testRandomData(usePyramid bool, hash string, n int, tester *chunkerTester)
 	// testing partial read
 	for i := 1; i < n; i += 10000 {
 		readableLength := n - i
 		output := make([]byte, readableLength)
 		r, err := reader.ReadAt(output, int64(i))
 		if r != readableLength || err != io.EOF {
 			tester.t.Fatalf("readAt error with offset %v read: %v n = %v err = %v\n", i, r, readableLength, err)
 		}
 		if input != nil {
-			if !bytes.Equal(output, input[i:]) {
-				tester.t.Fatalf("input and output mismatch\n IN: %v\nOUT: %v\n", input[i:], output)
+			if !bytes.Equal(output[:readableLength], input[i:]) {
+				tester.t.Fatalf("input and output mismatch\n IN: %v\nOUT: %v\n", input[i:], output[:readableLength])
 			}
 		}
 	}
@@ -177,7 +177,8 @@ func TestDataAppend(t *testing.T) {
 		input, found := tester.inputs[uint64(n)]
 		var data io.Reader
 		if !found {
-			data, input = GenerateRandomData(n)
+			input = testutil.RandomBytes(i, n)
+			data = bytes.NewReader(input)
 			tester.inputs[uint64(n)] = input
 		} else {
 			data = io.LimitReader(bytes.NewReader(input), int64(n))
@@ -199,7 +200,8 @@ func TestDataAppend(t *testing.T) {
 		appendInput, found := tester.inputs[uint64(m)]
 		var appendData io.Reader
 		if !found {
-			appendData, appendInput = GenerateRandomData(m)
+			appendInput = testutil.RandomBytes(i, m)
+			appendData = bytes.NewReader(appendInput)
 			tester.inputs[uint64(m)] = appendInput
 		} else {
 			appendData = io.LimitReader(bytes.NewReader(appendInput), int64(m))
@@ -272,7 +274,7 @@ func benchReadAll(reader LazySectionReader) {
 func benchmarkSplitJoin(n int, t *testing.B) {
 	t.ReportAllocs()
 	for i := 0; i < t.N; i++ {
-		data := testDataReader(n)
+		data := testutil.RandomReader(i, n)

 		putGetter := newTestHasherStore(NewMapChunkStore(), SHA3Hash)
 		ctx := context.TODO()
@@ -292,7 +294,7 @@ func benchmarkSplitJoin(n int, t *testing.B) {
 func benchmarkSplitTreeSHA3(n int, t *testing.B) {
 	t.ReportAllocs()
 	for i := 0; i < t.N; i++ {
-		data := testDataReader(n)
+		data := testutil.RandomReader(i, n)
 		putGetter := newTestHasherStore(&FakeChunkStore{}, SHA3Hash)

 		ctx := context.Background()
@@ -311,7 +313,7 @@ func benchmarkSplitTreeSHA3(n int, t *testing.B) {
 func benchmarkSplitTreeBMT(n int, t *testing.B) {
 	t.ReportAllocs()
 	for i := 0; i < t.N; i++ {
-		data := testDataReader(n)
+		data := testutil.RandomReader(i, n)
 		putGetter := newTestHasherStore(&FakeChunkStore{}, BMTHash)

 		ctx := context.Background()
@@ -329,7 +331,7 @@ func benchmarkSplitTreeBMT(n int, t *testing.B) {
 func benchmarkSplitPyramidBMT(n int, t *testing.B) {
 	t.ReportAllocs()
 	for i := 0; i < t.N; i++ {
-		data := testDataReader(n)
+		data := testutil.RandomReader(i, n)
 		putGetter := newTestHasherStore(&FakeChunkStore{}, BMTHash)

 		ctx := context.Background()
@@ -347,7 +349,7 @@ func benchmarkSplitPyramidBMT(n int, t *testing.B) {
 func benchmarkSplitPyramidSHA3(n int, t *testing.B) {
 	t.ReportAllocs()
 	for i := 0; i < t.N; i++ {
-		data := testDataReader(n)
+		data := testutil.RandomReader(i, n)
 		putGetter := newTestHasherStore(&FakeChunkStore{}, SHA3Hash)

 		ctx := context.Background()
@@ -365,8 +367,8 @@ func benchmarkSplitPyramidSHA3(n int, t *testing.B) {
 func benchmarkSplitAppendPyramid(n, m int, t *testing.B) {
 	t.ReportAllocs()
 	for i := 0; i < t.N; i++ {
-		data := testDataReader(n)
-		data1 := testDataReader(m)
+		data := testutil.RandomReader(i, n)
+		data1 := testutil.RandomReader(t.N+i, m)

 		store := NewMapChunkStore()
 		putGetter := newTestHasherStore(store, SHA3Hash)