Compare commits

...

84 Commits

Author SHA1 Message Date
2c1c78a6d9 Merge branch 'release/0.9.23' 2015-05-21 18:58:57 +02:00
3ea9868b65 miner: on downloader.Done/Fail stop immediately. Ignore pending evs 2015-05-21 18:14:32 +02:00
4600ecb5c7 cmd/geth: bump version 0.9.23 2015-05-21 18:02:15 +02:00
f1cc3619f5 Merge pull request #1055 from ethersphere/autodag
automatic DAG pregeneration for smooth epoch transitions
2015-05-21 09:01:57 -07:00
af28736bd0 Merge pull request #1064 from karalabe/downloader-attacks
Fix two additional download vulnerabilities
2015-05-21 09:00:12 -07:00
06a041589f eth, eth/downloader: remove duplicate consts, bump hash fetch to 2K 2015-05-21 18:16:04 +03:00
bed80133e0 automatic DAG pregeneration for smooth epoch transitions
- backend: AutoDAG bool flag passed from cli/eth.Config to ethereum, autoDAG loop started if true
- backend: autoDAG loop start/stop, remove previous DAG
- cli: AutoDAG bool flag, off by default, but automatically ON if mining
- admin jsre: add startAutoDAG stopAutoDAG and makeDAG in miner section
- switch on/off DAG autogeneration when miner started/stopped on console
2015-05-21 15:53:42 +01:00
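
The commit above describes the autoDAG behaviour in prose; of the files shown below, only the console bindings (startAutoDAG/stopAutoDAG/makeDAG) and the CLI flag appear, not the backend loop itself. As a rough, standalone illustration only (not the actual eth backend code), the sketch below shows the kind of pregeneration loop the description implies: the 30000-block epoch length is ethash's, while the polling interval, the "near the boundary" threshold, and the makeDAG/currentBlock stubs are assumptions made for the sketch.

package main

import (
	"fmt"
	"time"
)

const (
	epochLength   = 30000           // ethash DAG epoch length in blocks
	checkInterval = 1 * time.Second // assumed polling interval for the sketch
)

// makeDAG stands in for ethash.MakeDAG(blockNum, dir); currentBlock stands in
// for the chain manager's head-block query. Both are stubs for illustration.
func makeDAG(blockNum uint64) { fmt.Println("pregenerating DAG for epoch of block", blockNum) }
func currentBlock() uint64    { return 29500 }

// autoDAG pregenerates the DAG for the next epoch once the head block gets
// close to the epoch boundary, so mining never stalls on DAG generation at
// the epoch transition.
func autoDAG(quit <-chan struct{}) {
	ticker := time.NewTicker(checkInterval)
	defer ticker.Stop()
	var generatedFor uint64
	for {
		select {
		case <-ticker.C:
			head := currentBlock()
			next := (head/epochLength + 1) * epochLength
			// Start building the next DAG when the boundary is near
			// (threshold chosen arbitrarily for the sketch).
			if next-head < epochLength/10 && generatedFor != next {
				makeDAG(next)
				generatedFor = next
			}
		case <-quit:
			return
		}
	}
}

func main() {
	quit := make(chan struct{})
	go autoDAG(quit)
	time.Sleep(3 * time.Second)
	close(quit)
}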
1da145675d Merge pull request #1063 from bas-vk/issue1053
prefix dapp key/value entries in extradb
2015-05-21 06:43:17 -07:00
a61e6788db prefix dapp key/value entries in extradb 2015-05-21 15:20:38 +02:00
90b672f1af Merge pull request #1062 from Gustav-Simonsson/tests_updates
Tests updates
2015-05-21 04:49:23 -07:00
bf7dcfce36 Merge pull request #1059 from obscuren/cleanup
Cleanup
2015-05-21 04:11:36 -07:00
907848997b miner: one-shot update loop 2015-05-21 11:57:00 +02:00
207bd55751 eth: reduced max open files for LevelDB 2015-05-21 11:45:35 +02:00
84cd618585 ethdb: documentation and corruption recovery 2015-05-21 11:43:05 +02:00
ef8744d9fc core: switched back to set.Set for uncle verification 2015-05-21 11:36:39 +02:00
ff1630834c xeth: removed Value 2015-05-21 11:36:05 +02:00
52db6d8be5 eth/downloader: circumvent a forged block chain with known parent attack 2015-05-21 08:37:27 +03:00
e8b22b9253 eth/downloader: prevent a peer from dripping bad hashes 2015-05-21 08:07:58 +03:00
9bde7fd72e Merge pull request #1043 from obscuren/test_fixes
core/vm: optimisation on RETURN and updated tests
2015-05-20 03:06:04 -07:00
8fe8ec84f6 Merge pull request #1049 from zsfelfoldi/receipts
Storing tx receipts in extraDb
2015-05-20 03:00:36 -07:00
0300eef94d Merge pull request #1048 from ethersphere/cli-fixes
CLI, JSRE admin and  Solc improvements
2015-05-20 02:31:52 -07:00
6b83a0a589 Merge pull request #1050 from karalabe/handle-slow-download-tester
eth/downloader: fix test to it doesn't time out on a slow machine
2015-05-20 02:29:23 -07:00
adaa49d2cc Merge pull request #1051 from karalabe/fix-odd-naming
eth: fix odd method names in peer set
2015-05-20 02:29:07 -07:00
3c8227b935 eth: fix odd method names in peer set 2015-05-20 10:34:45 +03:00
6f54eb6d9a eth/downloader: fix test to it doesn't time out on a slow machine 2015-05-20 10:15:42 +03:00
00ec4132f8 Storing tx receipts in extraDb 2015-05-20 06:41:50 +02:00
e1d1417729 rpc: NewNotAvailableError instead of NewNotImplementedError if no solc 2015-05-20 05:29:28 +01:00
00f59f5014 fix eth.sign. now implemented in admin jsre until web3.js has it . 2015-05-20 04:38:20 +01:00
b0ae84aa0d multiple contract source for solidity compiler: returns contract array if multiple contracts. fixes #1023 2015-05-20 04:11:48 +01:00
ea893aca8f update web3.js to 0.4.2 2015-05-20 02:58:49 +01:00
d92172f3d4 add usage doc to wallet import 2015-05-20 02:47:13 +01:00
4201a18117 remove solc flair 2015-05-20 02:47:13 +01:00
22b694ee1e solc now in ethereum, fixes solc path setting; setSolc() didnt work 2015-05-20 02:47:13 +01:00
f9abcee0f9 fix solc tests unskip 2015-05-20 02:47:13 +01:00
9617aa8e19 tests: added conditional skip on long running VM tests
Set the TEST_VM_COMPLEX env var to test complex vm tests which require a
lot of ram and quite some time.
2015-05-20 00:21:24 +02:00
f5af1fdca8 core/vm: RETURN op code returns pointer to memory rather than copy 2015-05-20 00:21:23 +02:00
648b352424 tests/vm: updated tests and skipped output for specific tests
Skipped tests due to large return value
2015-05-20 00:21:23 +02:00
79042223dc Merge pull request #1044 from obscuren/thread_safe_block_cache
core: block cache Has method thread safe
2015-05-19 15:11:41 -07:00
b14ee6ce16 Merge pull request #1046 from tgerring/issue1045
Allow unlocking multiple accounts
2015-05-19 15:11:20 -07:00
8dac28f2e3 core: block cache Has method thread safe 2015-05-19 21:50:56 +02:00
87a05c8f38 core: skipped tests while cache disabled 2015-05-19 21:50:26 +02:00
32b8565022 Support multiple account unlock attempts 2015-05-19 14:46:32 -05:00
af8ada45e7 Allow unlocking multiple accounts #1045
Separate accounts with spaces when using --unlock
2015-05-19 13:40:41 -05:00
46d6470c43 Merge pull request #1035 from karalabe/eth-threadsafe-peers
eth: make the peer-set thread safe
2015-05-19 08:50:38 -07:00
9b825e2728 Merge pull request #1036 from tgerring/issue884
JSON RPC null field updates
2015-05-19 06:44:31 -07:00
748263d2f0 Use bytes.Repeat() instead of 32-byte literal 2015-05-19 08:14:48 -05:00
7d9a13e0d5 core: disable cash tmp 2015-05-19 12:59:58 +02:00
6c2ad7b72e Merge pull request #1034 from obscuren/tx_pool_fix
Nonce fix for failing transactions
2015-05-18 12:28:51 -07:00
b7baceefda xeth: remove nonce on error. Fixes #1026 2015-05-18 20:52:25 +02:00
4755caeb2d eth: remote a superfluous peerSet method 2015-05-18 21:35:42 +03:00
5422fe5125 eth: make the peer set thread safe 2015-05-18 21:33:37 +03:00
bd0c0a633b Merge pull request #1022 from obscuren/parallel_nonce_checks
Parallelise nonce checks
2015-05-18 11:13:53 -07:00
d6adadc5e3 Merge pull request #1033 from tgerring/issue1010
Add "removedb" command to Geth
2015-05-18 11:12:58 -07:00
0864f1fc8e Remove unused confirm() method 2015-05-18 12:25:33 -05:00
f14feea436 Refactor user prompts into utils 2015-05-18 12:24:30 -05:00
36a4ba3248 Add user confirmation for removedb 2015-05-18 12:04:35 -05:00
40717465bc core: fixed tests 2015-05-18 18:16:53 +02:00
59c0d01418 core: chain manager no longer exports genesis block 2015-05-18 18:16:53 +02:00
bc5e60cd63 miner: stale block notification 2015-05-18 18:16:53 +02:00
fe41bd6fe8 Add "removedb" command to Geth
Removes the state and blockchain databases
2015-05-18 10:54:15 -05:00
a3a5f8b593 Merge pull request #1032 from tgerring/issue1025
Multiple CORS domains support
2015-05-18 08:49:35 -07:00
54b5c8273d XEth comment clarification 2015-05-18 10:41:56 -05:00
a2598e649d Permit multiple CORS domains
Separated by spaces
2015-05-18 10:31:03 -05:00
62d76b8e1f Cleanup 2015-05-18 10:11:27 -05:00
a528bd04db Return nil for certain fields on eth_getTransactionByHash when not part of a block 2015-05-18 10:09:00 -05:00
677796b351 Merge pull request #1031 from bas-vk/issue1012
fixed race condition in miner
2015-05-18 07:33:06 -07:00
6a72cd45e2 Add wrapper for BlockTests/bcWalletTest.json 2015-05-18 16:28:54 +02:00
1d51cada3c Handle call depth exception for CREATE 2015-05-18 16:23:20 +02:00
d381d9a74c Return nil for certain fields on eth_getBlockByNumber pending 2015-05-18 09:16:10 -05:00
60561cdca2 fixed issue when miner is not stopping af stop command 2015-05-18 16:09:01 +02:00
7778740315 fixed race condition in miner 2015-05-18 15:13:58 +02:00
c67424ecc8 core: parallelise nonce checking when processing blocks
ChainManager now uses a parallel approach to block processing where all
nonces are checked seperatly from the block processing process. This
speeds up the process by about 3 times on my i7
2015-05-18 13:59:22 +02:00
36419defd1 Update Ethereum JSON test files 2015-05-18 12:45:24 +02:00
67d44519ce core: bugfix test 2. set => hash map 2015-05-18 10:49:09 +02:00
54f0f82dd1 ret 2015-05-18 10:14:48 +02:00
e323f0e831 core: tmp diagnostic logs 2015-05-18 10:13:50 +02:00
280b7f23af Merge branch 'hotfix/0.9.22' into develop 2015-05-17 21:35:29 +02:00
443d024843 Merge pull request #1021 from obscuren/global_chain_lock
core: global chain insert lock
2015-05-17 08:54:16 -07:00
27782bbade core: global chain insert lock 2015-05-17 00:55:02 +02:00
426c70ac0b Merge pull request #1011 from obscuren/upgradedb_fix
cmd/geth: delete state db on upgradedb command
2015-05-16 08:48:49 -07:00
28ba374f27 cmd/geth: delete state db on upgradedb command 2015-05-16 17:43:19 +02:00
ad99089567 Merge branch 'hotfix/0.9.21.1' into develop 2015-05-16 13:04:43 +02:00
61e8296bd8 Adjust miner coinbase and not just miner worker coinbase 2015-05-15 21:23:09 -04:00
4b2ee6c30c Merge branch 'release/0.9.21' into develop 2015-05-16 00:27:35 +02:00
43 changed files with 12316 additions and 954 deletions

View File

@@ -8,6 +8,7 @@ import (
 	"strconv"
 	"time"
+	"github.com/ethereum/ethash"
 	"github.com/ethereum/go-ethereum/accounts"
 	"github.com/ethereum/go-ethereum/cmd/utils"
 	"github.com/ethereum/go-ethereum/common"
@@ -35,6 +36,7 @@ func (js *jsre) adminBindings() {
 	eth := ethO.Object()
 	eth.Set("pendingTransactions", js.pendingTransactions)
 	eth.Set("resend", js.resend)
+	eth.Set("sign", js.sign)
 	js.re.Set("admin", struct{}{})
 	t, _ := js.re.Get("admin")
@@ -72,6 +74,9 @@ func (js *jsre) adminBindings() {
 	miner.Set("hashrate", js.hashrate)
 	miner.Set("setExtra", js.setExtra)
 	miner.Set("setGasPrice", js.setGasPrice)
+	miner.Set("startAutoDAG", js.startAutoDAG)
+	miner.Set("stopAutoDAG", js.stopAutoDAG)
+	miner.Set("makeDAG", js.makeDAG)
 	admin.Set("debug", struct{}{})
 	t, _ = admin.Get("debug")
@@ -177,6 +182,30 @@ func (js *jsre) resend(call otto.FunctionCall) otto.Value {
 	return otto.FalseValue()
 }
+
+func (js *jsre) sign(call otto.FunctionCall) otto.Value {
+	if len(call.ArgumentList) != 2 {
+		fmt.Println("requires 2 arguments: eth.sign(signer, data)")
+		return otto.UndefinedValue()
+	}
+	signer, err := call.Argument(0).ToString()
+	if err != nil {
+		fmt.Println(err)
+		return otto.UndefinedValue()
+	}
+	data, err := call.Argument(1).ToString()
+	if err != nil {
+		fmt.Println(err)
+		return otto.UndefinedValue()
+	}
+	v, err := js.xeth.Sign(signer, data, false)
+	if err != nil {
+		fmt.Println(err)
+		return otto.UndefinedValue()
+	}
+	return js.re.ToVal(v)
+}
+
 func (js *jsre) debugBlock(call otto.FunctionCall) otto.Value {
 	block, err := js.getBlock(call)
 	if err != nil {
@@ -253,6 +282,30 @@ func (js *jsre) hashrate(otto.FunctionCall) otto.Value {
 	return js.re.ToVal(js.ethereum.Miner().HashRate())
 }
+
+func (js *jsre) makeDAG(call otto.FunctionCall) otto.Value {
+	blockNumber, err := call.Argument(1).ToInteger()
+	if err != nil {
+		fmt.Println(err)
+		return otto.FalseValue()
+	}
+	err = ethash.MakeDAG(uint64(blockNumber), "")
+	if err != nil {
+		return otto.FalseValue()
+	}
+	return otto.TrueValue()
+}
+
+func (js *jsre) startAutoDAG(otto.FunctionCall) otto.Value {
+	js.ethereum.StartAutoDAG()
+	return otto.TrueValue()
+}
+
+func (js *jsre) stopAutoDAG(otto.FunctionCall) otto.Value {
+	js.ethereum.StopAutoDAG()
+	return otto.TrueValue()
+}
+
 func (js *jsre) backtrace(call otto.FunctionCall) otto.Value {
 	tracestr, err := call.Argument(0).ToString()
 	if err != nil {
@@ -291,6 +344,9 @@ func (js *jsre) startMining(call otto.FunctionCall) otto.Value {
 		threads = int64(js.ethereum.MinerThreads)
 	}
+	// switch on DAG autogeneration when miner starts
+	js.ethereum.StartAutoDAG()
+
 	err = js.ethereum.StartMining(int(threads))
 	if err != nil {
 		fmt.Println(err)
@@ -302,6 +358,7 @@ func (js *jsre) startMining(call otto.FunctionCall) otto.Value {
 func (js *jsre) stopMining(call otto.FunctionCall) otto.Value {
 	js.ethereum.StopMining()
+	js.ethereum.StopAutoDAG()
 	return otto.TrueValue()
 }
@@ -383,7 +440,7 @@ func (js *jsre) unlock(call otto.FunctionCall) otto.Value {
 	var passphrase string
 	if arg.IsUndefined() {
 		fmt.Println("Please enter a passphrase now.")
-		passphrase, err = readPassword("Passphrase: ", true)
+		passphrase, err = utils.PromptPassword("Passphrase: ", true)
 		if err != nil {
 			fmt.Println(err)
 			return otto.FalseValue()
@@ -410,12 +467,12 @@ func (js *jsre) newAccount(call otto.FunctionCall) otto.Value {
 	if arg.IsUndefined() {
 		fmt.Println("The new account will be encrypted with a passphrase.")
 		fmt.Println("Please enter a passphrase now.")
-		auth, err := readPassword("Passphrase: ", true)
+		auth, err := utils.PromptPassword("Passphrase: ", true)
 		if err != nil {
 			fmt.Println(err)
 			return otto.FalseValue()
 		}
-		confirm, err := readPassword("Repeat Passphrase: ", false)
+		confirm, err := utils.PromptPassword("Repeat Passphrase: ", false)
 		if err != nil {
 			fmt.Println(err)
 			return otto.FalseValue()

View File

@@ -1 +1 @@
-{"code":"605280600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b60376004356041565b8060005260206000f35b6000600782029050604d565b91905056","info":{"abiDefinition":[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}],"compilerVersion":"0.9.17","developerDoc":{"methods":{}},"language":"Solidity","languageVersion":"0","source":"contract test {\n /// @notice Will multiply `a` by 7.\n function multiply(uint a) returns(uint d) {\n return a * 7;\n }\n}\n","userDoc":{"methods":{"multiply(uint256)":{"notice":"Will multiply `a` by 7."}}}}}
+{"code":"0x605880600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b603d6004803590602001506047565b8060005260206000f35b60006007820290506053565b91905056","info":{"abiDefinition":[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}],"compilerVersion":"0.9.23","developerDoc":{"methods":{}},"language":"Solidity","languageVersion":"0","source":"contract test {\n /// @notice Will multiply `a` by 7.\n function multiply(uint a) returns(uint d) {\n return a * 7;\n }\n}\n","userDoc":{"methods":{"multiply(uint256)":{"notice":"Will multiply `a` by 7."}}}}}

View File

@@ -71,7 +71,7 @@ type jsre struct {
 	prompter
 }
-func newJSRE(ethereum *eth.Ethereum, libPath, solcPath, corsDomain string, interactive bool, f xeth.Frontend) *jsre {
+func newJSRE(ethereum *eth.Ethereum, libPath, corsDomain string, interactive bool, f xeth.Frontend) *jsre {
 	js := &jsre{ethereum: ethereum, ps1: "> "}
 	// set default cors domain used by startRpc from CLI flag
 	js.corsDomain = corsDomain
@@ -81,7 +81,6 @@ func newJSRE(ethereum *eth.Ethereum, libPath, solcPath, corsDomain string, inter
 	js.xeth = xeth.New(ethereum, f)
 	js.wait = js.xeth.UpdateState()
 	// update state in separare forever blocks
-	js.xeth.SetSolc(solcPath)
 	js.re = re.New(libPath)
 	js.apiBindings(f)
 	js.adminBindings()

View File

@@ -24,7 +24,7 @@ import (
 const (
 	testSolcPath = ""
-	solcVersion  = "0.9.17"
+	solcVersion  = "0.9.23"
 	testKey      = "e6fab74a43941f82d89cb7faa408e227cdad3153c4720e540e855c19b15e6674"
 	testAddress  = "0x8605cdbbdb6d264aa742e77020dcbc58fcdce182"
@@ -34,6 +34,7 @@ const (
 )
 var (
+	versionRE   = regexp.MustCompile(strconv.Quote(`"compilerVersion":"` + solcVersion + `"`))
 	testGenesis = `{"` + testAddress[2:] + `": {"balance": "` + testBalance + `"}}`
 )
@@ -75,6 +76,7 @@ func testJEthRE(t *testing.T) (string, *testjethre, *eth.Ethereum) {
 		AccountManager: am,
 		MaxPeers:       0,
 		Name:           "test",
+		SolcPath:       testSolcPath,
 	})
 	if err != nil {
 		t.Fatal("%v", err)
@@ -101,7 +103,7 @@ func testJEthRE(t *testing.T) (string, *testjethre, *eth.Ethereum) {
 		t.Errorf("Error creating DocServer: %v", err)
 	}
 	tf := &testjethre{ds: ds, stateDb: ethereum.ChainManager().State().Copy()}
-	repl := newJSRE(ethereum, assetPath, testSolcPath, "", false, tf)
+	repl := newJSRE(ethereum, assetPath, "", false, tf)
 	tf.jsre = repl
 	return tmp, tf, ethereum
 }
@@ -172,6 +174,8 @@ func TestBlockChain(t *testing.T) {
 	tmpfile := filepath.Join(extmp, "export.chain")
 	tmpfileq := strconv.Quote(tmpfile)
+	ethereum.ChainManager().Reset()
+
 	checkEvalJSON(t, repl, `admin.export(`+tmpfileq+`)`, `true`)
 	if _, err := os.Stat(tmpfile); err != nil {
 		t.Fatal(err)
@@ -226,11 +230,11 @@ func TestSignature(t *testing.T) {
 	defer ethereum.Stop()
 	defer os.RemoveAll(tmp)
-	val, err := repl.re.Run(`eth.sign({from: "` + testAddress + `", data: "` + testHash + `"})`)
+	val, err := repl.re.Run(`eth.sign("` + testAddress + `", "` + testHash + `")`)
 	// This is a very preliminary test, lacking actual signature verification
 	if err != nil {
-		t.Errorf("Error runnig js: %v", err)
+		t.Errorf("Error running js: %v", err)
 		return
 	}
 	output := val.String()
@@ -244,7 +248,6 @@ func TestSignature(t *testing.T) {
 }
 func TestContract(t *testing.T) {
-	t.Skip()
 	tmp, repl, ethereum := testJEthRE(t)
 	if err := ethereum.Start(); err != nil {
@@ -257,7 +260,9 @@ func TestContract(t *testing.T) {
 	var txc uint64
 	coinbase := common.HexToAddress(testAddress)
 	resolver.New(repl.xeth).CreateContracts(coinbase)
+	// time.Sleep(1000 * time.Millisecond)
+	// checkEvalJSON(t, repl, `eth.getBlock("pending", true).transactions.length`, `2`)
 	source := `contract test {\n` +
		"   /// @notice Will multiply `a` by 7." + `\n` +
		`   function multiply(uint a) returns(uint d) {\n` +
@@ -277,10 +282,9 @@ func TestContract(t *testing.T) {
 	// if solc is found with right version, test it, otherwise read from file
 	sol, err := compiler.New("")
 	if err != nil {
-		t.Logf("solc not found: skipping compiler test")
+		t.Logf("solc not found: mocking contract compilation step")
 	} else if sol.Version() != solcVersion {
-		err = fmt.Errorf("solc wrong version found (%v, expect %v): skipping compiler test", sol.Version(), solcVersion)
-		t.Log(err)
+		t.Logf("WARNING: solc different version found (%v, test written for %v, may need to update)", sol.Version(), solcVersion)
 	}
 	if err != nil {
@@ -293,10 +297,10 @@ func TestContract(t *testing.T) {
 			t.Errorf("%v", err)
 		}
 	} else {
-		checkEvalJSON(t, repl, `contract = eth.compile.solidity(source)`, string(contractInfo))
+		checkEvalJSON(t, repl, `contract = eth.compile.solidity(source).test`, string(contractInfo))
 	}
-	checkEvalJSON(t, repl, `contract.code`, `"605280600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b60376004356041565b8060005260206000f35b6000600782029050604d565b91905056"`)
+	checkEvalJSON(t, repl, `contract.code`, `"0x605880600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b603d6004803590602001506047565b8060005260206000f35b60006007820290506053565b91905056"`)
 	checkEvalJSON(
 		t, repl,
@@ -306,15 +310,16 @@ func TestContract(t *testing.T) {
 	callSetup := `abiDef = JSON.parse('[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}]');
 Multiply7 = eth.contract(abiDef);
-multiply7 = new Multiply7(contractaddress);
+multiply7 = Multiply7.at(contractaddress);
 `
+	// time.Sleep(1500 * time.Millisecond)
 	_, err = repl.re.Run(callSetup)
 	if err != nil {
-		t.Errorf("unexpected error registering, got %v", err)
+		t.Errorf("unexpected error setting up contract, got %v", err)
 	}
-	// updatespec
+	// checkEvalJSON(t, repl, `eth.getBlock("pending", true).transactions.length`, `3`)
 	// why is this sometimes failing?
 	// checkEvalJSON(t, repl, `multiply7.multiply.call(6)`, `42`)
 	expNotice := ""
@@ -322,20 +327,23 @@ multiply7 = new Multiply7(contractaddress);
 		t.Errorf("incorrect confirmation message: expected %v, got %v", expNotice, repl.lastConfirm)
 	}
+	// why 0?
+	checkEvalJSON(t, repl, `eth.getBlock("pending", true).transactions.length`, `0`)
+
 	txc, repl.xeth = repl.xeth.ApplyTestTxs(repl.stateDb, coinbase, txc)
 	checkEvalJSON(t, repl, `admin.contractInfo.start()`, `true`)
 	checkEvalJSON(t, repl, `multiply7.multiply.sendTransaction(6, { from: primary, gas: "1000000", gasPrice: "100000" })`, `undefined`)
-	expNotice = `About to submit transaction (no NatSpec info found for contract: content hash not found for '0x4a6c99e127191d2ee302e42182c338344b39a37a47cdbb17ab0f26b6802eb4d1'): {"params":[{"to":"0x5dcaace5982778b409c524873b319667eba5d074","data": "0xc6888fa10000000000000000000000000000000000000000000000000000000000000006"}]}`
+	expNotice = `About to submit transaction (no NatSpec info found for contract: content hash not found for '0x87e2802265838c7f14bb69eecd2112911af6767907a702eeaa445239fb20711b'): {"params":[{"to":"0x5dcaace5982778b409c524873b319667eba5d074","data": "0xc6888fa10000000000000000000000000000000000000000000000000000000000000006"}]}`
 	if repl.lastConfirm != expNotice {
 		t.Errorf("incorrect confirmation message: expected %v, got %v", expNotice, repl.lastConfirm)
 	}
+	var contenthash = `"0x86d2b7cf1e72e9a7a3f8d96601f0151742a2f780f1526414304fbe413dc7f9bd"`
+	if sol != nil {
+		modContractInfo := versionRE.ReplaceAll(contractInfo, []byte(`"compilerVersion":"`+sol.Version()+`"`))
+		_ = modContractInfo
+		// contenthash = crypto.Sha3(modContractInfo)
+	}
 	checkEvalJSON(t, repl, `filename = "/tmp/info.json"`, `"/tmp/info.json"`)
-	checkEvalJSON(t, repl, `contenthash = admin.contractInfo.register(primary, contractaddress, contract, filename)`, `"0x0d067e2dd99a4d8f0c0279738b17130dd415a89f24a23f0e7cf68c546ae3089d"`)
+	checkEvalJSON(t, repl, `contenthash = admin.contractInfo.register(primary, contractaddress, contract, filename)`, contenthash)
 	checkEvalJSON(t, repl, `admin.contractInfo.registerUrl(primary, contenthash, "file://"+filename)`, `true`)
 	if err != nil {
 		t.Errorf("unexpected error registering, got %v", err)

View File

@@ -21,7 +21,6 @@
 package main
 import (
-	"bufio"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -44,13 +43,12 @@ import (
 	"github.com/ethereum/go-ethereum/logger"
 	"github.com/mattn/go-colorable"
 	"github.com/mattn/go-isatty"
-	"github.com/peterh/liner"
 )
 import _ "net/http/pprof"
 const (
 	ClientIdentifier = "Geth"
-	Version          = "0.9.22"
+	Version          = "0.9.23"
 )
 var (
@@ -101,7 +99,15 @@ The output of this command is supposed to be machine-readable.
 				Usage: "import ethereum presale wallet",
 			},
 		},
-	},
+		Description: `
+
+    get wallet import /path/to/my/presale.wallet
+
+will prompt for your password and imports your ether presale account.
+It can be used non-interactively with the --password option taking a
+passwordfile as argument containing the wallet password in plaintext.
+
+`},
 	{
 		Action: accountList,
 		Name:   "account",
@@ -111,7 +117,7 @@ The output of this command is supposed to be machine-readable.
 Manage accounts lets you create new accounts, list all existing accounts,
 import a private key into a new account.
-'account help' shows a list of subcommands or help for one subcommand.
+' help' shows a list of subcommands or help for one subcommand.
 It supports interactive mode, when you are prompted for password as well as
 non-interactive mode where passwords are supplied via a given password file.
@@ -230,6 +236,11 @@ JavaScript API. See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Conso
 		Name:   "upgradedb",
 		Usage:  "upgrade chainblock database",
 	},
+	{
+		Action: removeDb,
+		Name:   "removedb",
+		Usage:  "Remove blockchain and state databases",
+	},
 }
 app.Flags = []cli.Flag{
 	utils.IdentityFlag,
@@ -246,6 +257,7 @@ JavaScript API. See https://github.com/ethereum/go-ethereum/wiki/Javascipt-Conso
 	utils.GasPriceFlag,
 	utils.MinerThreadsFlag,
 	utils.MiningEnabledFlag,
+	utils.AutoDAGFlag,
 	utils.NATFlag,
 	utils.NatspecEnabledFlag,
 	utils.NodeKeyFileFlag,
@@ -323,7 +335,6 @@ func console(ctx *cli.Context) {
 	repl := newJSRE(
 		ethereum,
 		ctx.String(utils.JSpathFlag.Name),
-		ctx.String(utils.SolcPathFlag.Name),
 		ctx.GlobalString(utils.RPCCORSDomainFlag.Name),
 		true,
 		nil,
@@ -345,7 +356,6 @@ func execJSFiles(ctx *cli.Context) {
 	repl := newJSRE(
 		ethereum,
 		ctx.String(utils.JSpathFlag.Name),
-		ctx.String(utils.SolcPathFlag.Name),
 		ctx.GlobalString(utils.RPCCORSDomainFlag.Name),
 		false,
 		nil,
@@ -361,12 +371,20 @@ func execJSFiles(ctx *cli.Context) {
 func unlockAccount(ctx *cli.Context, am *accounts.Manager, account string) (passphrase string) {
 	var err error
 	// Load startup keys. XXX we are going to need a different format
-	// Attempt to unlock the account
-	passphrase = getPassPhrase(ctx, "", false)
 	if len(account) == 0 {
 		utils.Fatalf("Invalid account address '%s'", account)
 	}
-	err = am.Unlock(common.HexToAddress(account), passphrase)
+	// Attempt to unlock the account 3 times
+	attempts := 3
+	for tries := 0; tries < attempts; tries++ {
+		msg := fmt.Sprintf("Unlocking account %s...%s | Attempt %d/%d", account[:8], account[len(account)-6:], tries+1, attempts)
+		passphrase = getPassPhrase(ctx, msg, false)
+		err = am.Unlock(common.HexToAddress(account), passphrase)
+		if err == nil {
+			break
+		}
+	}
 	if err != nil {
 		utils.Fatalf("Unlock account failed '%v'", err)
 	}
@@ -381,6 +399,8 @@ func startEth(ctx *cli.Context, eth *eth.Ethereum) {
 	am := eth.AccountManager()
 	account := ctx.GlobalString(utils.UnlockedAccountFlag.Name)
+	accounts := strings.Split(account, " ")
+	for _, account := range accounts {
 		if len(account) > 0 {
 			if account == "primary" {
 				primaryAcc, err := am.Primary()
@@ -391,6 +411,7 @@ func startEth(ctx *cli.Context, eth *eth.Ethereum) {
 			}
 			unlockAccount(ctx, am, account)
 		}
+	}
 	// Start auxiliary services if enabled.
 	if ctx.GlobalBool(utils.RPCEnabledFlag.Name) {
 		if err := utils.StartRPC(eth, ctx); err != nil {
@@ -421,12 +442,12 @@ func getPassPhrase(ctx *cli.Context, desc string, confirmation bool) (passphrase
 	passfile := ctx.GlobalString(utils.PasswordFileFlag.Name)
 	if len(passfile) == 0 {
 		fmt.Println(desc)
-		auth, err := readPassword("Passphrase: ", true)
+		auth, err := utils.PromptPassword("Passphrase: ", true)
 		if err != nil {
 			utils.Fatalf("%v", err)
 		}
 		if confirmation {
-			confirm, err := readPassword("Repeat Passphrase: ", false)
+			confirm, err := utils.PromptPassword("Repeat Passphrase: ", false)
 			if err != nil {
 				utils.Fatalf("%v", err)
 			}
@@ -543,6 +564,25 @@ func exportchain(ctx *cli.Context) {
 	return
 }
+
+func removeDb(ctx *cli.Context) {
+	confirm, err := utils.PromptConfirm("Remove local databases?")
+	if err != nil {
+		utils.Fatalf("%v", err)
+	}
+
+	if confirm {
+		fmt.Println("Removing chain and state databases...")
+		start := time.Now()
+
+		os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain"))
+		os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state"))
+
+		fmt.Printf("Removed in %v\n", time.Since(start))
+	} else {
+		fmt.Println("Operation aborted")
+	}
+}
+
 func upgradeDb(ctx *cli.Context) {
 	fmt.Println("Upgrade blockchain DB")
@@ -574,6 +614,7 @@ func upgradeDb(ctx *cli.Context) {
 	ethereum.ExtraDb().Close()
 	os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "blockchain"))
+	os.RemoveAll(filepath.Join(ctx.GlobalString(utils.DataDirFlag.Name), "state"))
 	ethereum, err = eth.New(cfg)
 	if err != nil {
@@ -665,18 +706,3 @@ func hashish(x string) bool {
 	_, err := strconv.Atoi(x)
 	return err != nil
 }
-
-func readPassword(prompt string, warnTerm bool) (string, error) {
-	if liner.TerminalSupported() {
-		lr := liner.NewLiner()
-		defer lr.Close()
-		return lr.PasswordPrompt(prompt)
-	}
-	if warnTerm {
-		fmt.Println("!! Unsupported terminal, password will be echoed.")
-	}
-	fmt.Print(prompt)
-	input, err := bufio.NewReader(os.Stdin).ReadString('\n')
-	fmt.Println()
-	return input, err
-}

View File

@@ -22,11 +22,13 @@
 package utils
 import (
+	"bufio"
 	"fmt"
 	"io"
 	"os"
 	"os/signal"
 	"regexp"
+	"strings"
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/ethereum/go-ethereum/core"
@@ -35,6 +37,7 @@ import (
 	"github.com/ethereum/go-ethereum/logger"
 	"github.com/ethereum/go-ethereum/logger/glog"
 	"github.com/ethereum/go-ethereum/rlp"
+	"github.com/peterh/liner"
 )
 var interruptCallbacks = []func(os.Signal){}
@@ -71,18 +74,45 @@ func openLogFile(Datadir string, filename string) *os.File {
 	return file
 }
-func confirm(message string) bool {
-	fmt.Println(message, "Are you sure? (y/n)")
-	var r string
-	fmt.Scanln(&r)
-	for ; ; fmt.Scanln(&r) {
-		if r == "n" || r == "y" {
-			break
-		} else {
-			fmt.Printf("Yes or no? (%s)", r)
-		}
-	}
-	return r == "y"
-}
+func PromptConfirm(prompt string) (bool, error) {
+	var (
+		input string
+		err   error
+	)
+	prompt = prompt + " [y/N] "
+
+	if liner.TerminalSupported() {
+		lr := liner.NewLiner()
+		defer lr.Close()
+		input, err = lr.Prompt(prompt)
+	} else {
+		fmt.Print(prompt)
+		input, err = bufio.NewReader(os.Stdin).ReadString('\n')
+		fmt.Println()
+	}
+
+	if len(input) > 0 && strings.ToUpper(input[:1]) == "Y" {
+		return true, nil
+	} else {
+		return false, nil
+	}
+
+	return false, err
+}
+
+func PromptPassword(prompt string, warnTerm bool) (string, error) {
+	if liner.TerminalSupported() {
+		lr := liner.NewLiner()
+		defer lr.Close()
+		return lr.PasswordPrompt(prompt)
+	}
+	if warnTerm {
+		fmt.Println("!! Unsupported terminal, password will be echoed.")
+	}
+	fmt.Print(prompt)
+	input, err := bufio.NewReader(os.Stdin).ReadString('\n')
+	fmt.Println()
+	return input, err
+}
 func initDataDir(Datadir string) {

View File

@@ -112,6 +112,10 @@ var (
 		Name:  "mine",
 		Usage: "Enable mining",
 	}
+	AutoDAGFlag = cli.BoolFlag{
+		Name:  "autodag",
+		Usage: "Enable automatic DAG pregeneration",
+	}
 	EtherbaseFlag = cli.StringFlag{
 		Name:  "etherbase",
 		Usage: "Public address for block mining rewards. By default the address of your primary account is used",
@@ -313,6 +317,8 @@ func MakeEthConfig(clientID, version string, ctx *cli.Context) *eth.Config {
 		Dial:      true,
 		BootNodes: ctx.GlobalString(BootnodesFlag.Name),
 		GasPrice:  common.String2Big(ctx.GlobalString(GasPriceFlag.Name)),
+		SolcPath:  ctx.GlobalString(SolcPathFlag.Name),
+		AutoDAG:   ctx.GlobalBool(AutoDAGFlag.Name) || ctx.GlobalBool(MiningEnabledFlag.Name),
 	}
 }
@@ -336,8 +342,8 @@ func GetChain(ctx *cli.Context) (*core.ChainManager, common.Database, common.Dat
 	}
 	eventMux := new(event.TypeMux)
-	chainManager := core.NewChainManager(blockDb, stateDb, eventMux)
 	pow := ethash.New()
+	chainManager := core.NewChainManager(blockDb, stateDb, pow, eventMux)
 	txPool := core.NewTxPool(eventMux, chainManager.State, chainManager.GasLimit)
 	blockProcessor := core.NewBlockProcessor(stateDb, extraDb, pow, txPool, chainManager, eventMux)
 	chainManager.SetProcessor(blockProcessor)

View File

@@ -18,7 +18,8 @@ import (
 )
 const (
-	flair           = "Christian <c@ethdev.com> and Lefteris <lefteris@ethdev.com> (c) 2014-2015"
+	// flair = "Christian <c@ethdev.com> and Lefteris <lefteris@ethdev.com> (c) 2014-2015"
+	flair           = ""
 	languageVersion = "0"
 )
@@ -91,7 +92,7 @@ func (sol *Solidity) Version() string {
 	return sol.version
 }
-func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
+func (sol *Solidity) Compile(source string) (contracts map[string]*Contract, err error) {
 	if len(source) == 0 {
 		err = fmt.Errorf("empty source")
@@ -122,11 +123,10 @@ func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
 		err = fmt.Errorf("solc error: missing code output")
 		return
 	}
-	if len(matches) > 1 {
-		err = fmt.Errorf("multi-contract sources are not supported")
-		return
-	}
-	_, file := filepath.Split(matches[0])
+
+	contracts = make(map[string]*Contract)
+	for _, path := range matches {
+		_, file := filepath.Split(path)
 		base := strings.Split(file, ".")[0]
 		codeFile := filepath.Join(wd, base+".binary")
@@ -134,12 +134,13 @@ func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
 		userDocFile := filepath.Join(wd, base+".docuser")
 		developerDocFile := filepath.Join(wd, base+".docdev")
-	code, err := ioutil.ReadFile(codeFile)
+		var code, abiDefinitionJson, userDocJson, developerDocJson []byte
+		code, err = ioutil.ReadFile(codeFile)
 		if err != nil {
 			err = fmt.Errorf("error reading compiler output for code: %v", err)
 			return
 		}
-	abiDefinitionJson, err := ioutil.ReadFile(abiDefinitionFile)
+		abiDefinitionJson, err = ioutil.ReadFile(abiDefinitionFile)
 		if err != nil {
 			err = fmt.Errorf("error reading compiler output for abiDefinition: %v", err)
 			return
@@ -147,7 +148,7 @@ func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
 		var abiDefinition interface{}
 		err = json.Unmarshal(abiDefinitionJson, &abiDefinition)
-	userDocJson, err := ioutil.ReadFile(userDocFile)
+		userDocJson, err = ioutil.ReadFile(userDocFile)
 		if err != nil {
 			err = fmt.Errorf("error reading compiler output for userDoc: %v", err)
 			return
@@ -155,7 +156,7 @@ func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
 		var userDoc interface{}
 		err = json.Unmarshal(userDocJson, &userDoc)
-	developerDocJson, err := ioutil.ReadFile(developerDocFile)
+		developerDocJson, err = ioutil.ReadFile(developerDocFile)
 		if err != nil {
 			err = fmt.Errorf("error reading compiler output for developerDoc: %v", err)
 			return
@@ -163,8 +164,8 @@ func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
 		var developerDoc interface{}
 		err = json.Unmarshal(developerDocJson, &developerDoc)
-	contract = &Contract{
-		Code: string(code),
+		contract := &Contract{
+			Code: "0x" + string(code),
 			Info: ContractInfo{
 				Source:   source,
 				Language: "Solidity",
@@ -176,6 +177,9 @@ func (sol *Solidity) Compile(source string) (contract *Contract, err error) {
 			},
 		}
+
+		contracts[base] = contract
+	}
 	return
 }
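
Since Compile now returns a map keyed by contract name instead of a single *Contract, callers iterate the result rather than reading one value. The following is a small caller-side sketch of the changed API, not code from this changeset: the import path github.com/ethereum/go-ethereum/common/compiler is assumed (file paths are not shown in this excerpt), while compiler.New(""), the map return, and the Code field match the hunks above.

package main

import (
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/common/compiler"
)

func main() {
	// A single-contract source; with this change a source string containing
	// several contracts simply yields several map entries.
	source := `contract test { function multiply(uint a) returns(uint d) { return a * 7; } }`

	sol, err := compiler.New("") // "" means: look up solc on $PATH
	if err != nil {
		log.Fatalf("solc not found: %v", err)
	}
	contracts, err := sol.Compile(source)
	if err != nil {
		log.Fatalf("compile failed: %v", err)
	}
	// Compile returns map[contract name]*Contract after this change.
	for name, c := range contracts {
		fmt.Printf("%s -> %s\n", name, c.Code)
	}
}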

View File

@@ -9,7 +9,7 @@ import (
 	"github.com/ethereum/go-ethereum/common"
 )
-const solcVersion = "0.9.17"
+const solcVersion = "0.9.23"
 var (
 	source = `
@@ -20,37 +20,45 @@ contract test {
 	}
 }
 `
-	code = "605280600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b60376004356041565b8060005260206000f35b6000600782029050604d565b91905056"
-	info = `{"source":"\ncontract test {\n /// @notice Will multiply ` + "`a`" + ` by 7.\n function multiply(uint a) returns(uint d) {\n return a * 7;\n }\n}\n","language":"Solidity","languageVersion":"0","compilerVersion":"0.9.17","abiDefinition":[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}],"userDoc":{"methods":{"multiply(uint256)":{"notice":"Will multiply ` + "`a`" + ` by 7."}}},"developerDoc":{"methods":{}}}`
-	infohash = common.HexToHash("0x834075768a68e500e459b9c3213750c84de3df47156500cb01bb664d3f88c60a")
+	code = "0x605880600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b603d6004803590602001506047565b8060005260206000f35b60006007820290506053565b91905056"
+	info = `{"source":"\ncontract test {\n /// @notice Will multiply ` + "`a`" + ` by 7.\n function multiply(uint a) returns(uint d) {\n return a * 7;\n }\n}\n","language":"Solidity","languageVersion":"0","compilerVersion":"0.9.23","abiDefinition":[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}],"userDoc":{"methods":{"multiply(uint256)":{"notice":"Will multiply ` + "`a`" + ` by 7."}}},"developerDoc":{"methods":{}}}`
+	infohash = common.HexToHash("0xea782f674eb898e477c20e8a7cf11c2c28b09fa68b5278732104f7a101aed255")
 )
 func TestCompiler(t *testing.T) {
 	sol, err := New("")
 	if err != nil {
-		t.Skip("no solc installed")
+		t.Skip("solc not found: skip")
+	} else if sol.Version() != solcVersion {
+		t.Logf("WARNING: a newer version of solc found (%v, expect %v)", sol.Version(), solcVersion)
 	}
-	contract, err := sol.Compile(source)
+	contracts, err := sol.Compile(source)
 	if err != nil {
-		t.Errorf("error compiling source. result %v: %v", contract, err)
+		t.Errorf("error compiling source. result %v: %v", contracts, err)
 		return
 	}
-	/*
-		if contract.Code != code {
-			t.Errorf("wrong code, expected\n%s, got\n%s", code, contract.Code)
-		}
-	*/
+	if len(contracts) != 1 {
+		t.Errorf("one contract expected, got\n%s", len(contracts))
+	}
+
+	if contracts["test"].Code != code {
+		t.Errorf("wrong code, expected\n%s, got\n%s", code, contracts["test"].Code)
+	}
 }
 func TestCompileError(t *testing.T) {
 	sol, err := New("")
 	if err != nil || sol.version != solcVersion {
-		t.Skip("no solc installed")
+		t.Skip("solc not found: skip")
+	} else if sol.Version() != solcVersion {
+		t.Logf("WARNING: a newer version of solc found (%v, expect %v)", sol.Version(), solcVersion)
 	}
-	contract, err := sol.Compile(source[2:])
+	contracts, err := sol.Compile(source[2:])
 	if err == nil {
-		t.Errorf("error expected compiling source. got none. result %v", contract)
+		t.Errorf("error expected compiling source. got none. result %v", contracts)
 		return
 	}
 }
@@ -78,11 +86,11 @@ func TestExtractInfo(t *testing.T) {
 	os.Remove(filename)
 	cinfohash, err := ExtractInfo(contract, filename)
 	if err != nil {
-		t.Errorf("%v", err)
+		t.Errorf("error extracting info: %v", err)
 	}
 	got, err := ioutil.ReadFile(filename)
 	if err != nil {
-		t.Errorf("%v", err)
+		t.Errorf("error reading '%v': %v", filename, err)
 	}
 	if string(got) != info {
 		t.Errorf("incorrect info.json extracted, expected:\n%s\ngot\n%s", info, string(got))

View File

@@ -85,6 +85,9 @@ func (bc *BlockCache) Get(hash common.Hash) *types.Block {
 }
 func (bc *BlockCache) Has(hash common.Hash) bool {
+	bc.mu.RLock()
+	defer bc.mu.RUnlock()
+
 	_, ok := bc.blocks[hash]
 	return ok
 }

View File

@@ -24,6 +24,8 @@ const (
 	BlockChainVersion = 2
 )
+var receiptsPre = []byte("receipts-")
+
 type BlockProcessor struct {
 	db      common.Database
 	extraDb common.Database
@@ -189,7 +191,7 @@ func (sm *BlockProcessor) processWithParent(block, parent *types.Block) (logs st
 	state := state.New(parent.Root(), sm.db)
 	// Block validation
-	if err = sm.ValidateHeader(block.Header(), parent.Header()); err != nil {
+	if err = sm.ValidateHeader(block.Header(), parent.Header(), false); err != nil {
 		return
 	}
@@ -263,13 +265,27 @@ func (sm *BlockProcessor) processWithParent(block, parent *types.Block) (logs st
 		putTx(sm.extraDb, tx, block, uint64(i))
 	}
+	receiptsRlp := block.Receipts().RlpEncode()
+	sm.extraDb.Put(append(receiptsPre, block.Hash().Bytes()...), receiptsRlp)
+
 	return state.Logs(), nil
 }
+
+func (self *BlockProcessor) GetBlockReceipts(bhash common.Hash) (receipts types.Receipts, err error) {
+	var rdata []byte
+	rdata, err = self.extraDb.Get(append(receiptsPre, bhash[:]...))
+
+	if err == nil {
+		err = rlp.DecodeBytes(rdata, &receipts)
+	}
+	return
+}
+
 // Validates the current block. Returns an error if the block was invalid,
 // an uncle or anything that isn't on the current block chain.
 // Validation validates easy over difficult (dagger takes longer time = difficult)
-func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header) error {
+func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header, checkPow bool) error {
 	if big.NewInt(int64(len(block.Extra))).Cmp(params.MaximumExtraDataSize) == 1 {
 		return fmt.Errorf("Block extra data too long (%d)", len(block.Extra))
 	}
@@ -300,10 +316,12 @@ func (sm *BlockProcessor) ValidateHeader(block, parent *types.Header) error {
 		return BlockEqualTSErr //ValidationError("Block timestamp equal or less than previous block (%v - %v)", block.Time, parent.Time)
 	}
+	if checkPow {
 		// Verify the nonce of the block. Return an error if it's not valid
 		if !sm.Pow.Verify(types.NewBlockWithHeader(block)) {
 			return ValidationError("Block's nonce is invalid (= %x)", block.Nonce)
 		}
+	}
 	return nil
 }
@@ -351,6 +369,13 @@ func (sm *BlockProcessor) VerifyUncles(statedb *state.StateDB, block, parent *ty
 		uncles.Add(hash)
 		if ancestors.Has(hash) {
+			branch := fmt.Sprintf(" O - %x\n |\n", block.Hash())
+			ancestors.Each(func(item interface{}) bool {
+				branch += fmt.Sprintf(" O - %x\n |\n", hash)
+				return true
+			})
+			glog.Infoln(branch)
+
 			return UncleError("uncle[%d](%x) is ancestor", i, hash[:4])
 		}
@@ -358,7 +383,7 @@ func (sm *BlockProcessor) VerifyUncles(statedb *state.StateDB, block, parent *ty
 			return UncleError("uncle[%d](%x)'s parent unknown (%x)", i, hash[:4], uncle.ParentHash[0:4])
 		}
-		if err := sm.ValidateHeader(uncle, ancestorHeaders[uncle.ParentHash]); err != nil {
+		if err := sm.ValidateHeader(uncle, ancestorHeaders[uncle.ParentHash], true); err != nil {
 			return ValidationError(fmt.Sprintf("uncle[%d](%x) header invalid: %v", i, hash[:4], err))
 		}
 	}

View File

@@ -14,7 +14,7 @@ func proc() (*BlockProcessor, *ChainManager) {
 	db, _ := ethdb.NewMemDatabase()
 	var mux event.TypeMux
-	chainMan := NewChainManager(db, db, &mux)
+	chainMan := NewChainManager(db, db, thePow(), &mux)
 	return NewBlockProcessor(db, db, ezp.New(), nil, chainMan, &mux), chainMan
 }
@@ -24,13 +24,13 @@ func TestNumber(t *testing.T) {
 	block1.Header().Number = big.NewInt(3)
 	block1.Header().Time--
-	err := bp.ValidateHeader(block1.Header(), chain.Genesis().Header())
+	err := bp.ValidateHeader(block1.Header(), chain.Genesis().Header(), false)
 	if err != BlockNumberErr {
 		t.Errorf("expected block number error %v", err)
 	}
 	block1 = chain.NewBlock(common.Address{})
-	err = bp.ValidateHeader(block1.Header(), chain.Genesis().Header())
+	err = bp.ValidateHeader(block1.Header(), chain.Genesis().Header(), false)
 	if err == BlockNumberErr {
 		t.Errorf("didn't expect block number error")
 	}

View File

@@ -109,7 +109,7 @@ func makeChain(bman *BlockProcessor, parent *types.Block, max int, db common.Dat
 // Effectively a fork factory
 func newChainManager(block *types.Block, eventMux *event.TypeMux, db common.Database) *ChainManager {
 	genesis := GenesisBlock(db)
-	bc := &ChainManager{blockDb: db, stateDb: db, genesisBlock: genesis, eventMux: eventMux}
+	bc := &ChainManager{blockDb: db, stateDb: db, genesisBlock: genesis, eventMux: eventMux, pow: FakePow{}}
 	bc.txState = state.ManageState(state.New(genesis.Root(), db))
 	bc.futureBlocks = NewBlockCache(1000)
 	if block == nil {

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"io" "io"
"math/big" "math/big"
"runtime"
"sync" "sync"
"time" "time"
@ -15,6 +16,7 @@ import (
"github.com/ethereum/go-ethereum/logger" "github.com/ethereum/go-ethereum/logger"
"github.com/ethereum/go-ethereum/logger/glog" "github.com/ethereum/go-ethereum/logger/glog"
"github.com/ethereum/go-ethereum/params" "github.com/ethereum/go-ethereum/params"
"github.com/ethereum/go-ethereum/pow"
"github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/rlp"
) )
@ -84,6 +86,7 @@ type ChainManager struct {
genesisBlock *types.Block genesisBlock *types.Block
// Last known total difficulty // Last known total difficulty
mu sync.RWMutex mu sync.RWMutex
chainmu sync.RWMutex
tsmu sync.RWMutex tsmu sync.RWMutex
td *big.Int td *big.Int
@ -99,9 +102,11 @@ type ChainManager struct {
quit chan struct{} quit chan struct{}
wg sync.WaitGroup wg sync.WaitGroup
pow pow.PoW
} }
func NewChainManager(blockDb, stateDb common.Database, mux *event.TypeMux) *ChainManager { func NewChainManager(blockDb, stateDb common.Database, pow pow.PoW, mux *event.TypeMux) *ChainManager {
bc := &ChainManager{ bc := &ChainManager{
blockDb: blockDb, blockDb: blockDb,
stateDb: stateDb, stateDb: stateDb,
@ -109,6 +114,7 @@ func NewChainManager(blockDb, stateDb common.Database, mux *event.TypeMux) *Chai
eventMux: mux, eventMux: mux,
quit: make(chan struct{}), quit: make(chan struct{}),
cache: NewBlockCache(blockCacheLimit), cache: NewBlockCache(blockCacheLimit),
pow: pow,
} }
bc.setLastState() bc.setLastState()
@ -342,7 +348,7 @@ func (self *ChainManager) Export(w io.Writer) error {
last := self.currentBlock.NumberU64() last := self.currentBlock.NumberU64()
for nr := uint64(0); nr <= last; nr++ { for nr := uint64(1); nr <= last; nr++ {
block := self.GetBlockByNumber(nr) block := self.GetBlockByNumber(nr)
if block == nil { if block == nil {
return fmt.Errorf("export failed on #%d: not found", nr) return fmt.Errorf("export failed on #%d: not found", nr)
@ -406,9 +412,11 @@ func (self *ChainManager) GetBlockHashesFromHash(hash common.Hash, max uint64) (
} }
func (self *ChainManager) GetBlock(hash common.Hash) *types.Block { func (self *ChainManager) GetBlock(hash common.Hash) *types.Block {
/*
if block := self.cache.Get(hash); block != nil { if block := self.cache.Get(hash); block != nil {
return block return block
} }
*/
data, _ := self.blockDb.Get(append(blockHashPre, hash[:]...)) data, _ := self.blockDb.Get(append(blockHashPre, hash[:]...))
if len(data) == 0 { if len(data) == 0 {
@ -518,6 +526,9 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) {
self.wg.Add(1) self.wg.Add(1)
defer self.wg.Done() defer self.wg.Done()
self.chainmu.Lock()
defer self.chainmu.Unlock()
// A queued approach to delivering events. This is generally faster than direct delivery and requires much less mutex acquiring. // A queued approach to delivering events. This is generally faster than direct delivery and requires much less mutex acquiring.
var ( var (
queue = make([]interface{}, len(chain)) queue = make([]interface{}, len(chain))
@ -525,10 +536,19 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) {
stats struct{ queued, processed, ignored int } stats struct{ queued, processed, ignored int }
tstart = time.Now() tstart = time.Now()
) )
// check the nonce in parallel to the block processing
// this speeds catching up significantly
nonceErrCh := make(chan error)
go func() {
nonceErrCh <- verifyNonces(self.pow, chain)
}()
for i, block := range chain { for i, block := range chain {
if block == nil { if block == nil {
continue continue
} }
// Setting block.Td regardless of error (known for example) prevents errors down the line // Setting block.Td regardless of error (known for example) prevents errors down the line
// in the protocol handler // in the protocol handler
block.Td = new(big.Int).Set(CalcTD(block, self.GetBlock(block.ParentHash()))) block.Td = new(big.Int).Set(CalcTD(block, self.GetBlock(block.ParentHash())))
@ -542,7 +562,6 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) {
continue continue
} }
block.Td = new(big.Int)
// Do not penelise on future block. We'll need a block queue eventually that will queue // Do not penelise on future block. We'll need a block queue eventually that will queue
// future block for future use // future block for future use
if err == BlockFutureErr { if err == BlockFutureErr {
@ -559,17 +578,11 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) {
continue continue
} }
h := block.Header() blockErr(block, err)
glog.V(logger.Error).Infof("INVALID block #%v (%x)\n", h.Number, h.Hash().Bytes())
glog.V(logger.Error).Infoln(err)
glog.V(logger.Debug).Infoln(block)
return i, err return i, err
} }
self.mu.Lock()
{
cblock := self.currentBlock
// Write block to database. Eventually we'll have to improve on this and throw away blocks that are
// not in the canonical chain.
@ -614,13 +627,18 @@ func (self *ChainManager) InsertChain(chain types.Blocks) (int, error) {
queueEvent.sideCount++
}
self.futureBlocks.Delete(block.Hash())
}
self.mu.Unlock()
stats.processed++
}
// check and wait for the nonce error channel and
// make sure no nonce error was thrown in the process
err := <-nonceErrCh
if err != nil {
return 0, err
}
if (stats.queued > 0 || stats.processed > 0 || stats.ignored > 0) && bool(glog.V(logger.Info)) {
tend := time.Since(tstart)
start, end := chain[0], chain[len(chain)-1]
@ -719,3 +737,63 @@ out:
}
}
}
func blockErr(block *types.Block, err error) {
h := block.Header()
glog.V(logger.Error).Infof("INVALID block #%v (%x)\n", h.Number, h.Hash().Bytes())
glog.V(logger.Error).Infoln(err)
glog.V(logger.Debug).Infoln(block)
}
// verifyNonces verifies nonces of the given blocks in parallel and returns
// an error if one of the blocks nonce verifications failed.
func verifyNonces(pow pow.PoW, blocks []*types.Block) error {
// Spawn a few workers. They listen for blocks on the in channel
// and send results on done. The workers will exit in the
// background when in is closed.
var (
in = make(chan *types.Block)
done = make(chan error, runtime.GOMAXPROCS(0))
)
defer close(in)
for i := 0; i < cap(done); i++ {
go verifyNonce(pow, in, done)
}
// Feed blocks to the workers, aborting at the first invalid nonce.
var (
running, i int
block *types.Block
sendin = in
)
for i < len(blocks) || running > 0 {
if i == len(blocks) {
// Disable sending to in.
sendin = nil
} else {
block = blocks[i]
i++
}
select {
case sendin <- block:
running++
case err := <-done:
running--
if err != nil {
return err
}
}
}
return nil
}
// verifyNonce is a worker for the verifyNonces method. It will run until
// in is closed.
func verifyNonce(pow pow.PoW, in <-chan *types.Block, done chan<- error) {
for block := range in {
if !pow.Verify(block) {
done <- ValidationError("Block(#%v) nonce is invalid (= %x)", block.Number(), block.Nonce)
} else {
done <- nil
}
}
}
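
The verifyNonces/verifyNonce pair above is the classic bounded worker-pool shape: a fixed number of workers read from an unbuffered in channel, results come back on a buffered done channel, and the feeder stops handing out work after the first error. Below is a minimal standalone sketch of the same pattern; the item type and the checkItem function are placeholders for illustration, not go-ethereum APIs.

package main

import (
	"errors"
	"fmt"
	"runtime"
)

// checkItem stands in for a per-block verification such as a PoW check.
func checkItem(n int) error {
	if n < 0 {
		return errors.New("invalid item")
	}
	return nil
}

// verifyAll mirrors the verifyNonces shape: spawn GOMAXPROCS workers,
// feed them items, and stop handing out work after the first error.
func verifyAll(items []int) error {
	in := make(chan int)
	done := make(chan error, runtime.GOMAXPROCS(0))
	defer close(in) // lets the workers exit once we return

	for i := 0; i < cap(done); i++ {
		go func() {
			for it := range in {
				done <- checkItem(it)
			}
		}()
	}

	running, i := 0, 0
	sendin := in
	for i < len(items) || running > 0 {
		var next int
		if i == len(items) {
			sendin = nil // disable the send case once everything is queued
		} else {
			next = items[i]
		}
		select {
		case sendin <- next:
			i++
			running++
		case err := <-done:
			running--
			if err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	fmt.Println(verifyAll([]int{1, 2, 3}))
}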

View File

@ -9,11 +9,13 @@ import (
"strconv"
"testing"
"github.com/ethereum/ethash"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethdb"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/pow"
"github.com/ethereum/go-ethereum/rlp"
)
@ -21,6 +23,11 @@ func init() {
runtime.GOMAXPROCS(runtime.NumCPU())
}
func thePow() pow.PoW {
pow, _ := ethash.NewForTesting()
return pow
}
// Test fork of length N starting from block i
func testFork(t *testing.T, bman *BlockProcessor, i, N int, f func(td1, td2 *big.Int)) {
// switch databases to process the new chain
@ -259,7 +266,7 @@ func TestChainInsertions(t *testing.T) {
}
var eventMux event.TypeMux
chainMan := NewChainManager(db, db, &eventMux)
chainMan := NewChainManager(db, db, thePow(), &eventMux)
txPool := NewTxPool(&eventMux, chainMan.State, func() *big.Int { return big.NewInt(100000000) })
blockMan := NewBlockProcessor(db, db, nil, txPool, chainMan, &eventMux)
chainMan.SetProcessor(blockMan)
@ -305,7 +312,7 @@ func TestChainMultipleInsertions(t *testing.T) {
}
}
var eventMux event.TypeMux
chainMan := NewChainManager(db, db, &eventMux)
chainMan := NewChainManager(db, db, thePow(), &eventMux)
txPool := NewTxPool(&eventMux, chainMan.State, func() *big.Int { return big.NewInt(100000000) })
blockMan := NewBlockProcessor(db, db, nil, txPool, chainMan, &eventMux)
chainMan.SetProcessor(blockMan)
@ -334,7 +341,7 @@ func TestGetAncestors(t *testing.T) {
db, _ := ethdb.NewMemDatabase()
var eventMux event.TypeMux
chainMan := NewChainManager(db, db, &eventMux)
chainMan := NewChainManager(db, db, thePow(), &eventMux)
chain, err := loadChain("valid1", t)
if err != nil {
fmt.Println(err)
@ -372,7 +379,7 @@ func makeChainWithDiff(genesis *types.Block, d []int, seed byte) []*types.Block
func chm(genesis *types.Block, db common.Database) *ChainManager {
var eventMux event.TypeMux
bc := &ChainManager{blockDb: db, stateDb: db, genesisBlock: genesis, eventMux: &eventMux}
bc := &ChainManager{blockDb: db, stateDb: db, genesisBlock: genesis, eventMux: &eventMux, pow: FakePow{}}
bc.cache = NewBlockCache(100)
bc.futureBlocks = NewBlockCache(100)
bc.processor = bproc{}
@ -383,6 +390,7 @@ func chm(genesis *types.Block, db common.Database) *ChainManager {
}
func TestReorgLongest(t *testing.T) {
t.Skip("skipped while cache is removed")
db, _ := ethdb.NewMemDatabase()
genesis := GenesisBlock(db)
bc := chm(genesis, db)
@ -402,6 +410,7 @@ func TestReorgLongest(t *testing.T) {
}
func TestReorgShortest(t *testing.T) {
t.Skip("skipped while cache is removed")
db, _ := ethdb.NewMemDatabase()
genesis := GenesisBlock(db)
bc := chm(genesis, db)

View File

@ -38,6 +38,12 @@ func (self *Execution) Create(caller vm.ContextRef) (ret []byte, err error, acco
code := self.input
self.input = nil
ret, err = self.exec(nil, code, caller)
// Here we get an error if we run into maximum stack depth,
// See: https://github.com/ethereum/yellowpaper/pull/131
// and YP definitions for CREATE instruction
if err != nil {
return nil, err, nil
}
account = self.env.State().GetStateObject(*self.address)
return
}

View File

@ -49,6 +49,18 @@ func (self *Memory) Get(offset, size int64) (cpy []byte) {
return
}
func (self *Memory) GetPtr(offset, size int64) []byte {
if size == 0 {
return nil
}
if len(self.store) > int(offset) {
return self.store[offset : offset+size]
}
return nil
}
func (m *Memory) Len() int {
return len(m.store)
}
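
The difference between the existing Get and the new GetPtr is copy versus view: Get copies the requested range out of the VM memory, while GetPtr returns a slice header that aliases the backing store, which is what lets the RETURN handler in the next file avoid one allocation per call. A tiny self-contained illustration of that distinction with plain Go slices (not the Memory type itself):

package main

import "fmt"

func main() {
	store := []byte{1, 2, 3, 4}

	cpy := make([]byte, 2)
	copy(cpy, store[1:3]) // what Memory.Get does: an independent copy
	view := store[1:3]    // what Memory.GetPtr does: a view into store

	cpy[0] = 9
	view[1] = 9

	fmt.Println(store) // [1 2 9 4]: the view aliases the store
	fmt.Println(cpy)   // [9 3]: the copy does not affect it
}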

View File

@ -695,7 +695,7 @@ func (self *Vm) Run(context *Context, callData []byte) (ret []byte, err error) {
self.Printf("resume %x (%v)", context.Address(), context.Gas)
case RETURN:
offset, size := stack.pop(), stack.pop()
ret := mem.Get(offset.Int64(), size.Int64())
ret := mem.GetPtr(offset.Int64(), size.Int64())
self.Printf(" => [%v, %v] (%d) 0x%x", offset, size, len(ret), ret).Endl()

View File

@ -14,6 +14,7 @@ import (
"github.com/ethereum/ethash"
"github.com/ethereum/go-ethereum/accounts"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/compiler"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/core/vm"
@ -30,6 +31,14 @@ import (
"github.com/ethereum/go-ethereum/whisper"
)
const (
epochLength = 30000
ethashRevision = 23
autoDAGcheckInterval = 10 * time.Hour
autoDAGepochHeight = epochLength / 2
)
var (
jsonlogger = logger.NewJsonLogger()
@ -59,6 +68,7 @@ type Config struct {
LogJSON string
VmDebug bool
NatSpec bool
AutoDAG bool
MaxPeers int
MaxPendingPeers int
@ -79,6 +89,7 @@ type Config struct {
GasPrice *big.Int
MinerThreads int
AccountManager *accounts.Manager
SolcPath string
// NewDB is used to create databases.
// If nil, the default is to create leveldb databases on disk.
@ -181,6 +192,8 @@ type Ethereum struct {
pow *ethash.Ethash
protocolManager *ProtocolManager
downloader *downloader.Downloader
SolcPath string
solc *compiler.Solidity
net *p2p.Server
eventMux *event.TypeMux
@ -193,6 +206,8 @@ type Ethereum struct {
MinerThreads int
NatSpec bool
DataDir string
AutoDAG bool
autodagquit chan bool
etherbase common.Address
clientVersion string
ethVersionId int
@ -209,7 +224,7 @@ func New(config *Config) (*Ethereum, error) {
// Let the database take 3/4 of the max open files (TODO figure out a way to get the actual limit of the open files)
const dbCount = 3
ethdb.OpenFileLimit = 256 / (dbCount + 1)
ethdb.OpenFileLimit = 128 / (dbCount + 1)
newdb := config.NewDB
if newdb == nil {
@ -264,11 +279,13 @@ func New(config *Config) (*Ethereum, error) {
netVersionId: config.NetworkId,
NatSpec: config.NatSpec,
MinerThreads: config.MinerThreads,
SolcPath: config.SolcPath,
AutoDAG: config.AutoDAG,
}
eth.chainManager = core.NewChainManager(blockDb, stateDb, eth.EventMux())
eth.downloader = downloader.New(eth.EventMux(), eth.chainManager.HasBlock, eth.chainManager.GetBlock)
eth.pow = ethash.New()
eth.chainManager = core.NewChainManager(blockDb, stateDb, eth.pow, eth.EventMux())
eth.downloader = downloader.New(eth.EventMux(), eth.chainManager.HasBlock, eth.chainManager.GetBlock)
eth.txPool = core.NewTxPool(eth.EventMux(), eth.chainManager.State, eth.chainManager.GasLimit)
eth.blockProcessor = core.NewBlockProcessor(stateDb, extraDb, eth.pow, eth.txPool, eth.chainManager, eth.EventMux())
eth.chainManager.SetProcessor(eth.blockProcessor)
@ -443,6 +460,10 @@ func (s *Ethereum) Start() error {
// periodically flush databases
go s.syncDatabases()
if s.AutoDAG {
s.StartAutoDAG()
}
// Start services
go s.txPool.Start()
s.protocolManager.Start()
@ -521,6 +542,7 @@ func (s *Ethereum) Stop() {
if s.whisper != nil {
s.whisper.Stop()
}
s.StopAutoDAG()
glog.V(logger.Info).Infoln("Server stopped")
close(s.shutdownChan)
@ -554,6 +576,77 @@ func (self *Ethereum) syncAccounts(tx *types.Transaction) {
}
}
// StartAutoDAG() spawns a go routine that checks the DAG every autoDAGcheckInterval
// by default that is 10 times per epoch
// in epoch n, if we past autoDAGepochHeight within-epoch blocks,
// it calls ethash.MakeDAG to pregenerate the DAG for the next epoch n+1
// if it does not exist yet as well as remove the DAG for epoch n-1
// the loop quits if autodagquit channel is closed, it can safely restart and
// stop any number of times.
// For any more sophisticated pattern of DAG generation, use CLI subcommand
// makedag
func (self *Ethereum) StartAutoDAG() {
if self.autodagquit != nil {
return // already started
}
go func() {
glog.V(logger.Info).Infof("Automatic pregeneration of ethash DAG ON (ethash dir: %s)", ethash.DefaultDir)
var nextEpoch uint64
timer := time.After(0)
self.autodagquit = make(chan bool)
for {
select {
case <-timer:
glog.V(logger.Info).Infof("checking DAG (ethash dir: %s)", ethash.DefaultDir)
currentBlock := self.ChainManager().CurrentBlock().NumberU64()
thisEpoch := currentBlock / epochLength
if nextEpoch <= thisEpoch {
if currentBlock%epochLength > autoDAGepochHeight {
if thisEpoch > 0 {
previousDag, previousDagFull := dagFiles(thisEpoch - 1)
os.Remove(filepath.Join(ethash.DefaultDir, previousDag))
os.Remove(filepath.Join(ethash.DefaultDir, previousDagFull))
glog.V(logger.Info).Infof("removed DAG for epoch %d (%s)", thisEpoch-1, previousDag)
}
nextEpoch = thisEpoch + 1
dag, _ := dagFiles(nextEpoch)
if _, err := os.Stat(dag); os.IsNotExist(err) {
glog.V(logger.Info).Infof("Pregenerating DAG for epoch %d (%s)", nextEpoch, dag)
err := ethash.MakeDAG(nextEpoch*epochLength, "") // "" -> ethash.DefaultDir
if err != nil {
glog.V(logger.Error).Infof("Error generating DAG for epoch %d (%s)", nextEpoch, dag)
return
}
} else {
glog.V(logger.Error).Infof("DAG for epoch %d (%s)", nextEpoch, dag)
}
}
}
timer = time.After(autoDAGcheckInterval)
case <-self.autodagquit:
return
}
}
}()
}
// dagFiles(epoch) returns the two alternative DAG filenames (not a path)
// 1) <revision>-<hex(seedhash[8])> 2) full-R<revision>-<hex(seedhash[8])>
func dagFiles(epoch uint64) (string, string) {
seedHash, _ := ethash.GetSeedHash(epoch * epochLength)
dag := fmt.Sprintf("full-R%d-%x", ethashRevision, seedHash[:8])
return dag, "full-R" + dag
}
// stopAutoDAG stops automatic DAG pregeneration by quitting the loop
func (self *Ethereum) StopAutoDAG() {
if self.autodagquit != nil {
close(self.autodagquit)
self.autodagquit = nil
}
glog.V(logger.Info).Infof("Automatic pregeneration of ethash DAG OFF (ethash dir: %s)", ethash.DefaultDir)
}
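
StartAutoDAG/StopAutoDAG follow a simple start/stop lifecycle: starting creates the quit channel and launches the timer loop, stopping closes the channel and clears the field, so the pair can be called any number of times. A minimal sketch of just that lifecycle, with generic names (service, Start, Stop are illustrative, not the Ethereum API); the channel is handed to the goroutine as an argument so Stop can nil the field without racing the loop:

package main

import (
	"fmt"
	"time"
)

// service shows the start/stop-any-number-of-times pattern used above:
// Start creates the quit channel, Stop closes it, and both are no-ops
// when called twice in a row.
type service struct {
	quit chan bool
}

func (s *service) Start() {
	if s.quit != nil {
		return // already running
	}
	s.quit = make(chan bool)
	go func(quit chan bool) {
		timer := time.After(0)
		for {
			select {
			case <-timer:
				fmt.Println("periodic check")
				timer = time.After(50 * time.Millisecond)
			case <-quit:
				return
			}
		}
	}(s.quit)
}

func (s *service) Stop() {
	if s.quit != nil {
		close(s.quit)
		s.quit = nil
	}
}

func main() {
	s := &service{}
	s.Start()
	time.Sleep(120 * time.Millisecond)
	s.Stop()
	s.Stop() // safe: second call is a no-op
}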
func saveProtocolVersion(db common.Database, protov int) {
d, _ := db.Get([]byte("ProtocolVersion"))
protocolVersion := common.NewValue(d).Uint()
@ -571,3 +664,18 @@ func saveBlockchainVersion(db common.Database, bcVersion int) {
db.Put([]byte("BlockchainVersion"), common.NewValue(bcVersion).Bytes())
}
}
func (self *Ethereum) Solc() (*compiler.Solidity, error) {
var err error
if self.solc == nil {
self.solc, err = compiler.New(self.SolcPath)
}
return self.solc, err
}
// set in js console via admin interface or wrapper from cli flags
func (self *Ethereum) SetSolc(solcPath string) (*compiler.Solidity, error) {
self.SolcPath = solcPath
self.solc = nil
return self.Solc()
}
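
Solc/SetSolc above is a small lazy-initialisation pattern: the compiler handle is built on first use from SolcPath and invalidated whenever the path changes. A rough standalone sketch of that shape; the compiler struct and newCompiler function below are stand-ins, not the real common/compiler package:

package main

import "fmt"

// compiler stands in for compiler.Solidity; newCompiler for compiler.New.
type compiler struct{ path string }

func newCompiler(path string) (*compiler, error) { return &compiler{path: path}, nil }

// lazy holds a compiler that is only constructed on first use and can be
// invalidated by changing the path, mirroring Solc()/SetSolc().
type lazy struct {
	path string
	c    *compiler
}

func (l *lazy) Get() (*compiler, error) {
	var err error
	if l.c == nil {
		l.c, err = newCompiler(l.path)
	}
	return l.c, err
}

func (l *lazy) SetPath(path string) (*compiler, error) {
	l.path = path
	l.c = nil // force re-initialisation with the new path
	return l.Get()
}

func main() {
	l := &lazy{path: "/usr/bin/solc"}
	c, _ := l.Get()
	fmt.Println(c.path)
	c, _ = l.SetPath("/opt/solc")
	fmt.Println(c.path)
}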

View File

@ -15,8 +15,10 @@ import (
)
const (
maxHashFetch = 512 // Amount of hashes to be fetched per chunk
MinHashFetch = 512 // Minimum amount of hashes to not consider a peer stalling
maxBlockFetch = 128 // Amount of blocks to be fetched per chunk
MaxHashFetch = 2048 // Amount of hashes to be fetched per retrieval request
MaxBlockFetch = 128 // Amount of blocks to be fetched per retrieval request
peerCountTimeout = 12 * time.Second // Amount of time it takes for the peer handler to ignore minDesiredPeerCount
hashTTL = 5 * time.Second // Time it takes for a hash request to time out
)
@ -28,10 +30,11 @@ var (
)
var (
errLowTd = errors.New("peer's TD is too low")
errLowTd = errors.New("peers TD is too low")
ErrBusy = errors.New("busy")
errUnknownPeer = errors.New("peer's unknown or unhealthy")
errUnknownPeer = errors.New("peer is unknown or unhealthy")
ErrBadPeer = errors.New("action from bad peer ignored")
ErrStallingPeer = errors.New("peer is stalling")
errNoPeers = errors.New("no peers to keep download active")
ErrPendingQueue = errors.New("pending items in queue")
ErrTimeout = errors.New("timeout")
@ -60,13 +63,18 @@ type hashPack struct {
hashes []common.Hash
}
type crossCheck struct {
expire time.Time
parent common.Hash
}
type Downloader struct {
mux *event.TypeMux
mu sync.RWMutex
queue *queue // Scheduler for selecting the hashes to download
peers *peerSet // Set of active peers from which download can proceed
checks map[common.Hash]time.Time // Pending cross checks to verify a hash chain
checks map[common.Hash]*crossCheck // Pending cross checks to verify a hash chain
// Callbacks
hasBlock hashCheckFn
@ -157,7 +165,7 @@ func (d *Downloader) Synchronise(id string, hash common.Hash) error {
// Reset the queue and peer set to clean any internal leftover state
d.queue.Reset()
d.peers.Reset()
d.checks = make(map[common.Hash]time.Time)
d.checks = make(map[common.Hash]*crossCheck)
// Retrieve the origin peer and initiate the downloading process
p := d.peers.Peer(id)
@ -283,15 +291,22 @@ func (d *Downloader) fetchHashes(p *peer, h common.Hash) error {
return ErrBadPeer
}
if !done {
// Check that the peer is not stalling the sync
if len(inserts) < MinHashFetch {
return ErrStallingPeer
}
// Try and fetch a random block to verify the hash batch
// Skip the last hash as the cross check races with the next hash fetch
if len(inserts) > 1 {
cross := rand.Intn(len(inserts) - 1)
cross := inserts[rand.Intn(len(inserts)-1)]
origin, parent := inserts[cross], inserts[cross+1]
glog.V(logger.Detail).Infof("Cross checking (%s) with %x", active.id, cross)
glog.V(logger.Detail).Infof("Cross checking (%s) with %x/%x", active.id, origin, parent)
d.checks[cross] = time.Now().Add(blockTTL)
d.checks[origin] = &crossCheck{
active.getBlocks([]common.Hash{cross})
expire: time.Now().Add(blockTTL),
parent: parent,
}
active.getBlocks([]common.Hash{origin})
// Also fetch a fresh
active.getHashes(head)
continue
@ -310,8 +325,8 @@ func (d *Downloader) fetchHashes(p *peer, h common.Hash) error {
continue
}
block := blockPack.blocks[0]
if _, ok := d.checks[block.Hash()]; ok {
if check, ok := d.checks[block.Hash()]; ok {
if !d.queue.Has(block.ParentHash()) {
if block.ParentHash() != check.parent {
return ErrCrossCheckFailed
}
delete(d.checks, block.Hash())
@ -319,8 +334,8 @@ func (d *Downloader) fetchHashes(p *peer, h common.Hash) error {
case <-crossTicker.C:
// Iterate over all the cross checks and fail the hash chain if they're not verified
for hash, deadline := range d.checks {
for hash, check := range d.checks {
if time.Now().After(deadline) {
if time.Now().After(check.expire) {
glog.V(logger.Debug).Infof("Cross check timeout for %x", hash)
return ErrCrossCheckFailed
}
@ -438,7 +453,7 @@ out:
}
// Get a possible chunk. If nil is returned no chunk
// could be returned due to no hashes available.
request := d.queue.Reserve(peer, maxBlockFetch)
request := d.queue.Reserve(peer, MaxBlockFetch)
if request == nil {
continue
}
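
The crossCheck change above is the heart of the forged-parent fix: instead of remembering only an expiry for each cross-checked hash, the downloader now also records which parent the announced hash chain claims, and the delivered block must match it. A simplified sketch of that check, with strings standing in for common.Hash and all names purely illustrative:

package main

import (
	"errors"
	"fmt"
	"time"
)

// crossCheck mirrors the struct added in the downloader: an expiry for the
// pending verification plus the parent the delivered block must point to.
type crossCheck struct {
	expire time.Time
	parent string // stand-in for common.Hash
}

var errCrossCheckFailed = errors.New("block cross-check failed")

// verify is a stand-in for the delivery path: a block under cross-check is
// only accepted if its parent matches what the hash chain announced.
func verify(checks map[string]crossCheck, blockHash, blockParent string) error {
	check, ok := checks[blockHash]
	if !ok {
		return nil // not a pending cross-check, nothing to do
	}
	if blockParent != check.parent {
		return errCrossCheckFailed
	}
	delete(checks, blockHash)
	return nil
}

func main() {
	checks := map[string]crossCheck{
		"origin": {expire: time.Now().Add(time.Second), parent: "parent"},
	}
	fmt.Println(verify(checks, "origin", "forged")) // block cross-check failed
	fmt.Println(verify(checks, "origin", "parent")) // <nil>, check consumed
}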

View File

@ -53,6 +53,8 @@ type downloadTester struct {
blocks map[common.Hash]*types.Block // Blocks associated with the hashes
chain []common.Hash // Block-chain being constructed
maxHashFetch int // Overrides the maximum number of retrieved hashes
t *testing.T
pcount int
done chan bool
@ -133,8 +135,12 @@ func (dl *downloadTester) getBlock(hash common.Hash) *types.Block {
// getHashes retrieves a batch of hashes for reconstructing the chain.
func (dl *downloadTester) getHashes(head common.Hash) error {
limit := MaxHashFetch
if dl.maxHashFetch > 0 {
limit = dl.maxHashFetch
}
// Gather the next batch of hashes
hashes := make([]common.Hash, 0, maxHashFetch)
hashes := make([]common.Hash, 0, limit)
for i, hash := range dl.hashes {
if hash == head {
i++
@ -382,7 +388,7 @@ func TestRepeatingHashAttack(t *testing.T) {
// Make sure that syncing returns and does so with a failure
select {
case <-time.After(100 * time.Millisecond):
case <-time.After(time.Second):
t.Fatalf("synchronisation blocked")
case err := <-errc:
if err == nil {
@ -469,6 +475,23 @@ func TestMadeupHashChainAttack(t *testing.T) {
}
}
// Tests that if a malicious peer makes up a random hash chain, and tries to push
// indefinitely, one hash at a time, it actually gets caught with it. The reason
// this is separate from the classical made up chain attack is that sending hashes
// one by one prevents reliable block/parent verification.
func TestMadeupHashChainDrippingAttack(t *testing.T) {
// Create a random chain of hashes to drip
hashes := createHashes(0, 16*blockCacheLimit)
tester := newTester(t, hashes, nil)
// Try and sync with the attacker, one hash at a time
tester.maxHashFetch = 1
tester.newPeer("attack", big.NewInt(10000), hashes[0])
if _, err := tester.syncTake("attack", hashes[0]); err != ErrStallingPeer {
t.Fatalf("synchronisation error mismatch: have %v, want %v", err, ErrStallingPeer)
}
}
// Tests that if a malicious peer makes up a random block chain, and tried to
// push indefinitely, it actually gets caught with it.
func TestMadeupBlockChainAttack(t *testing.T) {
@ -479,7 +502,7 @@ func TestMadeupBlockChainAttack(t *testing.T) {
crossCheckCycle = 25 * time.Millisecond
// Create a long chain of blocks and simulate an invalid chain by dropping every second
hashes := createHashes(0, 32*blockCacheLimit)
hashes := createHashes(0, 16*blockCacheLimit)
blocks := createBlocksFromHashes(hashes)
gapped := make([]common.Hash, len(hashes)/2)
@ -502,3 +525,37 @@ func TestMadeupBlockChainAttack(t *testing.T) {
t.Fatalf("failed to synchronise blocks: %v", err)
}
}
// Advanced form of the above forged blockchain attack, where not only does the
// attacker make up a valid hashes for random blocks, but also forges the block
// parents to point to existing hashes.
func TestMadeupParentBlockChainAttack(t *testing.T) {
defaultBlockTTL := blockTTL
defaultCrossCheckCycle := crossCheckCycle
blockTTL = 100 * time.Millisecond
crossCheckCycle = 25 * time.Millisecond
// Create a long chain of blocks and simulate an invalid chain by dropping every second
hashes := createHashes(0, 16*blockCacheLimit)
blocks := createBlocksFromHashes(hashes)
forges := createBlocksFromHashes(hashes)
for hash, block := range forges {
block.ParentHeaderHash = hash // Simulate pointing to already known hash
}
// Try and sync with the malicious node and check that it fails
tester := newTester(t, hashes, forges)
tester.newPeer("attack", big.NewInt(10000), hashes[0])
if _, err := tester.syncTake("attack", hashes[0]); err != ErrCrossCheckFailed {
t.Fatalf("synchronisation error mismatch: have %v, want %v", err, ErrCrossCheckFailed)
}
// Ensure that a valid chain can still pass sync
blockTTL = defaultBlockTTL
crossCheckCycle = defaultCrossCheckCycle
tester.blocks = blocks
tester.newPeer("valid", big.NewInt(20000), hashes[0])
if _, err := tester.syncTake("valid", hashes[0]); err != nil {
t.Fatalf("failed to synchronise blocks: %v", err)
}
}

View File

@ -17,7 +17,7 @@ import (
)
const (
blockCacheLimit = 1024 // Maximum number of blocks to cache before throttling the download
blockCacheLimit = 8 * MaxBlockFetch // Maximum number of blocks to cache before throttling the download
)
// fetchRequest is a currently running block retrieval operation.

View File

@ -47,9 +47,7 @@ type ProtocolManager struct {
txpool txPool
chainman *core.ChainManager
downloader *downloader.Downloader
peers *peerSet
pmu sync.Mutex
peers map[string]*peer
SubProtocol p2p.Protocol
@ -73,7 +71,7 @@ func NewProtocolManager(protocolVersion, networkId int, mux *event.TypeMux, txpo
txpool: txpool,
chainman: chainman,
downloader: downloader,
peers: make(map[string]*peer),
peers: newPeerSet(),
newPeerCh: make(chan *peer, 1),
quitSync: make(chan struct{}),
}
@ -95,10 +93,14 @@ func NewProtocolManager(protocolVersion, networkId int, mux *event.TypeMux, txpo
}
func (pm *ProtocolManager) removePeer(peer *peer) {
pm.pmu.Lock()
// Unregister the peer from the downloader
defer pm.pmu.Unlock()
pm.downloader.UnregisterPeer(peer.id)
delete(pm.peers, peer.id)
// Remove the peer from the Ethereum peer set too
glog.V(logger.Detail).Infoln("Removing peer", peer.id)
if err := pm.peers.Unregister(peer.id); err != nil {
glog.V(logger.Error).Infoln("Removal failed:", err)
}
}
func (pm *ProtocolManager) Start() {
@ -136,31 +138,32 @@ func (pm *ProtocolManager) newPeer(pv, nv int, p *p2p.Peer, rw p2p.MsgReadWriter
}
func (pm *ProtocolManager) handle(p *peer) error {
// Execute the Ethereum handshake, short circuit if fails
if err := p.handleStatus(); err != nil {
return err
}
pm.pmu.Lock()
// Register the peer locally and in the downloader too
pm.peers[p.id] = p
glog.V(logger.Detail).Infoln("Adding peer", p.id)
pm.pmu.Unlock()
if err := pm.peers.Register(p); err != nil {
glog.V(logger.Error).Infoln("Addition failed:", err)
pm.downloader.RegisterPeer(p.id, p.recentHash, p.requestHashes, p.requestBlocks)
return err
defer func() {
}
pm.removePeer(p)
defer pm.removePeer(p)
}()
if err := pm.downloader.RegisterPeer(p.id, p.recentHash, p.requestHashes, p.requestBlocks); err != nil {
return err
}
// propagate existing transactions. new transactions appearing
// after this will be sent via broadcasts.
if err := p.sendTransactions(pm.txpool.GetTransactions()); err != nil {
return err
}
// main loop. handle incoming messages.
for {
if err := pm.handleMsg(p); err != nil {
return err
}
}
return nil
}
@ -203,8 +206,8 @@ func (self *ProtocolManager) handleMsg(p *peer) error {
return errResp(ErrDecode, "->msg %v: %v", msg, err)
}
if request.Amount > maxHashes {
if request.Amount > downloader.MaxHashFetch {
request.Amount = maxHashes
request.Amount = downloader.MaxHashFetch
}
hashes := self.chainman.GetBlockHashesFromHash(request.Hash, request.Amount)
@ -251,7 +254,7 @@ func (self *ProtocolManager) handleMsg(p *peer) error {
if block != nil {
blocks = append(blocks, block)
}
if i == maxBlocks {
if i == downloader.MaxBlockFetch {
break
}
}
@ -346,18 +349,8 @@ func (pm *ProtocolManager) verifyTd(peer *peer, request newBlockMsgData) error {
// out which peers do not contain the block in their block set and will do a
// sqrt(peers) to determine the amount of peers we broadcast to.
func (pm *ProtocolManager) BroadcastBlock(hash common.Hash, block *types.Block) {
pm.pmu.Lock()
// Broadcast block to a batch of peers not knowing about it
defer pm.pmu.Unlock()
peers := pm.peers.PeersWithoutBlock(hash)
// Find peers who don't know anything about the given hash. Peers that
// don't know about the hash will be a candidate for the broadcast loop
var peers []*peer
for _, peer := range pm.peers {
if !peer.blockHashes.Has(hash) {
peers = append(peers, peer)
}
}
// Broadcast block to peer set
peers = peers[:int(math.Sqrt(float64(len(peers))))]
for _, peer := range peers {
peer.sendNewBlock(block)
@ -369,18 +362,8 @@ func (pm *ProtocolManager) BroadcastBlock(hash common.Hash, block *types.Block)
// out which peers do not contain the block in their block set and will do a
// sqrt(peers) to determine the amount of peers we broadcast to.
func (pm *ProtocolManager) BroadcastTx(hash common.Hash, tx *types.Transaction) {
pm.pmu.Lock()
// Broadcast transaction to a batch of peers not knowing about it
defer pm.pmu.Unlock()
peers := pm.peers.PeersWithoutTx(hash)
// Find peers who don't know anything about the given hash. Peers that
// don't know about the hash will be a candidate for the broadcast loop
var peers []*peer
for _, peer := range pm.peers {
if !peer.txHashes.Has(hash) {
peers = append(peers, peer)
}
}
// Broadcast block to peer set
//FIXME include this again: peers = peers[:int(math.Sqrt(float64(len(peers))))]
for _, peer := range peers {
peer.sendTransaction(tx)

View File

@ -1,17 +1,25 @@
package eth
import (
"errors"
"fmt"
"math/big"
"sync"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/eth/downloader"
"github.com/ethereum/go-ethereum/logger"
"github.com/ethereum/go-ethereum/logger/glog"
"github.com/ethereum/go-ethereum/p2p"
"gopkg.in/fatih/set.v0"
)
var (
errAlreadyRegistered = errors.New("peer is already registered")
errNotRegistered = errors.New("peer is not registered")
)
type statusMsgData struct {
ProtocolVersion uint32
NetworkId uint32
@ -25,16 +33,6 @@ type getBlockHashesMsgData struct {
Amount uint64
}
func getBestPeer(peers map[string]*peer) *peer {
var peer *peer
for _, cp := range peers {
if peer == nil || cp.td.Cmp(peer.td) > 0 {
peer = cp
}
}
return peer
}
type peer struct {
*p2p.Peer
@ -103,8 +101,8 @@ func (p *peer) sendTransaction(tx *types.Transaction) error {
}
func (p *peer) requestHashes(from common.Hash) error {
glog.V(logger.Debug).Infof("[%s] fetching hashes (%d) %x...\n", p.id, maxHashes, from[:4])
glog.V(logger.Debug).Infof("[%s] fetching hashes (%d) %x...\n", p.id, downloader.MaxHashFetch, from[:4])
return p2p.Send(p.rw, GetBlockHashesMsg, getBlockHashesMsgData{from, maxHashes})
return p2p.Send(p.rw, GetBlockHashesMsg, getBlockHashesMsgData{from, downloader.MaxHashFetch})
}
func (p *peer) requestBlocks(hashes []common.Hash) error {
@ -159,3 +157,103 @@ func (p *peer) handleStatus() error {
return <-errc
}
// peerSet represents the collection of active peers currently participating in
// the Ethereum sub-protocol.
type peerSet struct {
peers map[string]*peer
lock sync.RWMutex
}
// newPeerSet creates a new peer set to track the active participants.
func newPeerSet() *peerSet {
return &peerSet{
peers: make(map[string]*peer),
}
}
// Register injects a new peer into the working set, or returns an error if the
// peer is already known.
func (ps *peerSet) Register(p *peer) error {
ps.lock.Lock()
defer ps.lock.Unlock()
if _, ok := ps.peers[p.id]; ok {
return errAlreadyRegistered
}
ps.peers[p.id] = p
return nil
}
// Unregister removes a remote peer from the active set, disabling any further
// actions to/from that particular entity.
func (ps *peerSet) Unregister(id string) error {
ps.lock.Lock()
defer ps.lock.Unlock()
if _, ok := ps.peers[id]; !ok {
return errNotRegistered
}
delete(ps.peers, id)
return nil
}
// Peer retrieves the registered peer with the given id.
func (ps *peerSet) Peer(id string) *peer {
ps.lock.RLock()
defer ps.lock.RUnlock()
return ps.peers[id]
}
// Len returns the current number of peers in the set.
func (ps *peerSet) Len() int {
ps.lock.RLock()
defer ps.lock.RUnlock()
return len(ps.peers)
}
// PeersWithoutBlock retrieves a list of peers that do not have a given block in
// their set of known hashes.
func (ps *peerSet) PeersWithoutBlock(hash common.Hash) []*peer {
ps.lock.RLock()
defer ps.lock.RUnlock()
list := make([]*peer, 0, len(ps.peers))
for _, p := range ps.peers {
if !p.blockHashes.Has(hash) {
list = append(list, p)
}
}
return list
}
// PeersWithoutTx retrieves a list of peers that do not have a given transaction
// in their set of known hashes.
func (ps *peerSet) PeersWithoutTx(hash common.Hash) []*peer {
ps.lock.RLock()
defer ps.lock.RUnlock()
list := make([]*peer, 0, len(ps.peers))
for _, p := range ps.peers {
if !p.txHashes.Has(hash) {
list = append(list, p)
}
}
return list
}
// BestPeer retrieves the known peer with the currently highest total difficulty.
func (ps *peerSet) BestPeer() *peer {
ps.lock.RLock()
defer ps.lock.RUnlock()
var best *peer
for _, p := range ps.peers {
if best == nil || p.td.Cmp(best.td) > 0 {
best = p
}
}
return best
}

View File

@ -12,8 +12,6 @@ const (
NetworkId = 0
ProtocolLength = uint64(8)
ProtocolMaxMsgSize = 10 * 1024 * 1024
maxHashes = 512
maxBlocks = 128
)
// eth protocol message codes

View File

@ -10,8 +10,8 @@ import (
"github.com/ethereum/go-ethereum/logger"
"github.com/ethereum/go-ethereum/logger/glog"
)
// Sync contains all synchronisation code for the eth protocol
// update periodically tries to synchronise with the network, both downloading
// hashes and blocks as well as retrieving cached ones.
func (pm *ProtocolManager) update() {
forceSync := time.Tick(forceSyncCycle)
blockProc := time.Tick(blockProcCycle)
for { for {
select { select {
case <-pm.newPeerCh: case <-pm.newPeerCh:
// Meet the `minDesiredPeerCount` before we select our best peer // Make sure we have peers to select from, then sync
if len(pm.peers) < minDesiredPeerCount { if pm.peers.Len() < minDesiredPeerCount {
break break
} }
// Find the best peer and synchronise with it go pm.synchronise(pm.peers.BestPeer())
peer := getBestPeer(pm.peers)
if peer == nil {
glog.V(logger.Debug).Infoln("Sync attempt canceled. No peers available")
}
go pm.synchronise(peer)
case <-forceSync: case <-forceSync:
// Force a sync even if not enough peers are present // Force a sync even if not enough peers are present
if peer := getBestPeer(pm.peers); peer != nil { go pm.synchronise(pm.peers.BestPeer())
go pm.synchronise(peer)
}
case <-blockProc: case <-blockProc:
// Try to pull some blocks from the downloaded // Try to pull some blocks from the downloaded
if atomic.CompareAndSwapInt32(&blockProcPend, 0, 1) { if atomic.CompareAndSwapInt32(&blockProcPend, 0, 1) {
@ -51,10 +45,9 @@ func (pm *ProtocolManager) update() {
} }
}
}
// processBlocks will attempt to reconstruct a chain by checking the first item and check if it's
// processBlocks retrieves downloaded blocks from the download cache and tries
// a known parent. The first block in the chain may be unknown during downloading. When the
// to construct the local block chain with it. Note, since the block retrieval
// downloader isn't downloading blocks will be dropped with an unknown parent until either it
// order matters, access to this function *must* be synchronized/serialized.
// has depleted the list or found a known parent.
func (pm *ProtocolManager) processBlocks() error {
pm.wg.Add(1)
defer pm.wg.Done()
@ -79,15 +72,24 @@ func (pm *ProtocolManager) processBlocks() error {
return nil
}
// adding various sanity checks as well as wrapping it with various log entries.
func (pm *ProtocolManager) synchronise(peer *peer) { func (pm *ProtocolManager) synchronise(peer *peer) {
// Short circuit if no peers are available
if peer == nil {
glog.V(logger.Debug).Infoln("Synchronisation canceled: no peers available")
return
}
// Make sure the peer's TD is higher than our own. If not drop.
if peer.td.Cmp(pm.chainman.Td()) <= 0 {
glog.V(logger.Debug).Infoln("Synchronisation canceled: peer TD too small")
return
}
// FIXME if we have the hash in our chain and the TD of the peer is
// much higher than ours, something is wrong with us or the peer.
// Check if the hash is on our own chain
if pm.chainman.HasBlock(peer.recentHash) {
glog.V(logger.Debug).Infoln("Synchronisation canceled: head already known")
return
}
// Get the hashes from the peer (synchronously)

View File

@ -7,6 +7,7 @@ import (
"github.com/ethereum/go-ethereum/logger"
"github.com/ethereum/go-ethereum/logger/glog"
"github.com/syndtr/goleveldb/leveldb"
"github.com/syndtr/goleveldb/leveldb/errors"
"github.com/syndtr/goleveldb/leveldb/iterator"
"github.com/syndtr/goleveldb/leveldb/opt"
)
@ -24,9 +25,17 @@ type LDBDatabase struct {
quit chan struct{}
}
// NewLDBDatabase returns a LevelDB wrapped object. LDBDatabase does not persist data by
// itself but requires a background poller which syncs every X. `Flush` should be called
// when data needs to be stored and written to disk.
func NewLDBDatabase(file string) (*LDBDatabase, error) {
// Open the db
db, err := leveldb.OpenFile(file, &opt.Options{OpenFilesCacheCapacity: OpenFileLimit})
// check for corruption and attempt to recover
if _, iscorrupted := err.(*errors.ErrCorrupted); iscorrupted {
db, err = leveldb.RecoverFile(file, nil)
}
// (re) check for errors and abort if opening of the db failed
if err != nil {
return nil, err
}
@ -44,21 +53,15 @@ func (self *LDBDatabase) makeQueue() {
self.queue = make(map[string][]byte)
}
// Put puts the given key / value to the queue
func (self *LDBDatabase) Put(key []byte, value []byte) {
self.mu.Lock()
defer self.mu.Unlock()
self.queue[string(key)] = value
/*
value = rle.Compress(value)
err := self.db.Put(key, value, nil)
if err != nil {
fmt.Println("Error put", err)
}
*/
}
// Get returns the given key if it's present.
func (self *LDBDatabase) Get(key []byte) ([]byte, error) {
self.mu.Lock()
defer self.mu.Unlock()
@ -76,6 +79,7 @@ func (self *LDBDatabase) Get(key []byte) ([]byte, error) {
return rle.Decompress(dat)
}
// Delete deletes the key from the queue and database
func (self *LDBDatabase) Delete(key []byte) error {
self.mu.Lock()
defer self.mu.Unlock()
@ -100,6 +104,7 @@ func (self *LDBDatabase) NewIterator() iterator.Iterator {
return self.db.NewIterator(nil, nil)
}
// Flush flushes out the queue to leveldb
func (self *LDBDatabase) Flush() error {
self.mu.Lock()
defer self.mu.Unlock()

File diff suppressed because it is too large

View File

@ -40,7 +40,6 @@ func (self *CpuAgent) Stop() {
defer self.mu.Unlock()
close(self.quit)
close(self.quitCurrentOp)
}
func (self *CpuAgent) Start() {
@ -50,7 +49,6 @@ func (self *CpuAgent) Start() {
self.quit = make(chan struct{})
// creating current op ch makes sure we're not closing a nil ch
// later on
self.quitCurrentOp = make(chan struct{})
self.workCh = make(chan *types.Block, 1)
go self.update()
@ -62,11 +60,19 @@ out:
select {
case block := <-self.workCh:
self.mu.Lock()
if self.quitCurrentOp != nil {
close(self.quitCurrentOp)
}
self.quitCurrentOp = make(chan struct{})
go self.mine(block, self.quitCurrentOp)
self.mu.Unlock()
go self.mine(block)
case <-self.quit:
self.mu.Lock()
if self.quitCurrentOp != nil {
close(self.quitCurrentOp)
self.quitCurrentOp = nil
}
self.mu.Unlock()
break out
}
}
@ -84,16 +90,11 @@ done:
}
}
func (self *CpuAgent) mine(block *types.Block) {
func (self *CpuAgent) mine(block *types.Block, stop <- chan struct{}) {
glog.V(logger.Debug).Infof("(re)started agent[%d]. mining...\n", self.index)
// Reset the channel
self.mu.Lock()
self.quitCurrentOp = make(chan struct{})
self.mu.Unlock()
// Mine
nonce, mixDigest := self.pow.Search(block, self.quitCurrentOp)
nonce, mixDigest := self.pow.Search(block, stop)
if nonce != 0 {
block.SetNonce(nonce)
block.Header().MixDigest = common.BytesToHash(mixDigest)

View File

@ -39,6 +39,10 @@ func New(eth core.Backend, mux *event.TypeMux, pow pow.PoW) *Miner {
return miner
}
// update keeps track of the downloader events. Please be aware that this is a one shot type of update loop.
// It's entered once and as soon as `Done` or `Failed` has been broadcasted the events are unregistered and
// the loop is exited. This to prevent a major security vuln where external parties can DOS you with blocks
// and halt your mining operation for as long as the DOS continues.
func (self *Miner) update() {
events := self.mux.Subscribe(downloader.StartEvent{}, downloader.DoneEvent{}, downloader.FailedEvent{})
for ev := range events.Chan() {
@ -59,6 +63,10 @@ func (self *Miner) update() {
self.Start(self.coinbase, self.threads)
}
}
// unsubscribe. we're only interested in this event once
events.Unsubscribe()
// stop immediately and ignore all further pending events
break
}
}
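
The one-shot behaviour documented above is easy to miss in the diff: the miner reacts to the first StartEvent by pausing and to the first DoneEvent/FailedEvent by resuming, then drops the subscription entirely so later events cannot toggle it again. A toy sketch of the same control flow, with plain channels standing in for the event mux and the resume simplified to be unconditional (the real code only restarts if mining was on before):

package main

import "fmt"

type startEvent struct{}
type doneEvent struct{}
type failedEvent struct{}

// update consumes sync events exactly once: mining pauses on the first start
// event and, after the first done/failed event, the loop exits for good so a
// peer cannot keep toggling the miner by re-triggering synchronisations.
func update(events <-chan interface{}, miner interface{ Start(); Stop() }) {
	for ev := range events {
		switch ev.(type) {
		case startEvent:
			miner.Stop()
		case doneEvent, failedEvent:
			miner.Start()
			return // one shot: ignore all further events
		}
	}
}

type fakeMiner struct{}

func (fakeMiner) Start() { fmt.Println("miner started") }
func (fakeMiner) Stop()  { fmt.Println("miner stopped") }

func main() {
	ch := make(chan interface{}, 3)
	ch <- startEvent{}
	ch <- doneEvent{}
	ch <- startEvent{} // ignored: the loop has already exited
	close(ch)
	update(ch, fakeMiner{})
}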

View File

@ -224,7 +224,13 @@ func (self *worker) wait() {
}
self.mux.Post(core.NewMinedBlockEvent{block})
glog.V(logger.Info).Infof("🔨 Mined block #%v", block.Number())
var stale string
canonBlock := self.chain.GetBlockByNumber(block.NumberU64())
if canonBlock != nil && canonBlock.Hash() != block.Hash() {
stale = "stale-"
}
glog.V(logger.Info).Infof("🔨 Mined %sblock #%v (%x)", stale, block.Number(), block.Hash().Bytes()[:4])
jsonlogger.LogJson(&logger.EthMinerNewBlock{
BlockHash: block.Hash().Hex(),
@ -264,6 +270,7 @@ func (self *worker) makeCurrent() {
}
block.Header().Extra = self.extra
// when 08 is processed ancestors contain 07 (quick block)
current := env(block, self.eth)
for _, ancestor := range self.chain.GetAncestors(block, 7) {
for _, uncle := range ancestor.Uncles() {

View File

@ -1,9 +1,9 @@
package rpc
import (
"bytes"
"encoding/json"
"math/big"
// "sync"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
@ -158,16 +158,16 @@ func (api *EthereumApi) GetRequestReply(req *RpcRequest, reply *interface{}) err
v := api.xethAtStateNum(args.BlockNumber).CodeAtBytes(args.Address)
*reply = newHexData(v)
case "eth_sign": // case "eth_sign":
args := new(NewSigArgs) // args := new(NewSigArgs)
if err := json.Unmarshal(req.Params, &args); err != nil { // if err := json.Unmarshal(req.Params, &args); err != nil {
return err // return err
} // }
v, err := api.xeth().Sign(args.From, args.Data, false) // v, err := api.xeth().Sign(args.From, args.Data, false)
if err != nil { // if err != nil {
return err // return err
} // }
*reply = v // *reply = v
case "eth_sendTransaction", "eth_transact":
args := new(NewTxArgs)
@ -230,7 +240,14 @@ func (api *EthereumApi) GetRequestReply(req *RpcRequest, reply *interface{}) err
block := api.xeth().EthBlockByNumber(args.BlockNumber)
br := NewBlockRes(block, args.IncludeTxs)
// If request was for "pending", nil nonsensical fields
if args.BlockNumber == -2 {
br.BlockHash = nil
br.BlockNumber = nil
br.Miner = nil
br.Nonce = nil
br.LogsBloom = nil
}
*reply = br
case "eth_getTransactionByHash":
args := new(HashArgs)
@ -240,9 +247,12 @@ func (api *EthereumApi) GetRequestReply(req *RpcRequest, reply *interface{}) err
tx, bhash, bnum, txi := api.xeth().EthTransactionByHash(args.Hash)
if tx != nil {
v := NewTransactionRes(tx)
// if the blockhash is 0, assume this is a pending transaction
if bytes.Compare(bhash.Bytes(), bytes.Repeat([]byte{0}, 32)) != 0 {
v.BlockHash = newHexData(bhash)
v.BlockNumber = newHexNum(bnum)
v.TxIndex = newHexNum(txi)
}
*reply = v
}
case "eth_getTransactionByBlockHashAndIndex":
@ -337,7 +347,7 @@ func (api *EthereumApi) GetRequestReply(req *RpcRequest, reply *interface{}) err
solc, _ := api.xeth().Solc()
if solc == nil {
return NewNotImplementedError(req.Method)
return NewNotAvailableError(req.Method, "solc (solidity compiler) not found")
}
args := new(SourceArgs)
@ -345,12 +355,11 @@ func (api *EthereumApi) GetRequestReply(req *RpcRequest, reply *interface{}) err
return err
}
contract, err := solc.Compile(args.Source)
contracts, err := solc.Compile(args.Source)
if err != nil {
return err
}
contract.Code = newHexData(contract.Code).String()
*reply = contracts
*reply = contract
case "eth_newFilter":
args := new(BlockFilterArgs)
@ -577,7 +586,7 @@ func (api *EthereumApi) GetRequestReply(req *RpcRequest, reply *interface{}) err
return NewNotImplementedError(req.Method)
}
glog.V(logger.Detail).Infof("Reply: %T %s\n", reply, reply)
// glog.V(logger.Detail).Infof("Reply: %v\n", reply)
return nil
}

View File

@ -2,14 +2,11 @@ package rpc
import (
"encoding/json"
// "sync"
"testing"
// "time"
// "fmt"
"io/ioutil"
"strconv"
"testing"
"github.com/ethereum/go-ethereum/common/compiler"
"github.com/ethereum/go-ethereum/eth"
"github.com/ethereum/go-ethereum/xeth"
)
@ -30,12 +27,15 @@ func TestWeb3Sha3(t *testing.T) {
}
}
const solcVersion = "0.9.23"
func TestCompileSolidity(t *testing.T) {
t.Skip()
solc, err := compiler.New("")
if solc == nil {
t.Skip("no solidity compiler")
t.Skip("no solc found: skip")
} else if solc.Version() != solcVersion {
t.Logf("WARNING: solc different version found (%v, test written for %v, may need to update)", solc.Version(), solcVersion)
}
source := `contract test {\n` +
" /// @notice Will multiply `a` by 7." + `\n` +
@ -46,16 +46,16 @@ func TestCompileSolidity(t *testing.T) {
jsonstr := `{"jsonrpc":"2.0","method":"eth_compileSolidity","params":["` + source + `"],"id":64}` jsonstr := `{"jsonrpc":"2.0","method":"eth_compileSolidity","params":["` + source + `"],"id":64}`
//expCode := "605280600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b60376004356041565b8060005260206000f35b6000600782029050604d565b91905056" expCode := "0x605880600c6000396000f3006000357c010000000000000000000000000000000000000000000000000000000090048063c6888fa114602e57005b603d6004803590602001506047565b8060005260206000f35b60006007820290506053565b91905056"
expAbiDefinition := `[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}]` expAbiDefinition := `[{"constant":false,"inputs":[{"name":"a","type":"uint256"}],"name":"multiply","outputs":[{"name":"d","type":"uint256"}],"type":"function"}]`
expUserDoc := `{"methods":{"multiply(uint256)":{"notice":"Will multiply ` + "`a`" + ` by 7."}}}` expUserDoc := `{"methods":{"multiply(uint256)":{"notice":"Will multiply ` + "`a`" + ` by 7."}}}`
expDeveloperDoc := `{"methods":{}}` expDeveloperDoc := `{"methods":{}}`
expCompilerVersion := `0.9.13` expCompilerVersion := solc.Version()
expLanguage := "Solidity" expLanguage := "Solidity"
expLanguageVersion := "0" expLanguageVersion := "0"
expSource := source expSource := source
api := NewEthereumApi(&xeth.XEth{}) api := NewEthereumApi(xeth.NewTest(&eth.Ethereum{}, nil))
var req RpcRequest var req RpcRequest
json.Unmarshal([]byte(jsonstr), &req) json.Unmarshal([]byte(jsonstr), &req)
@ -70,26 +70,34 @@ func TestCompileSolidity(t *testing.T) {
t.Errorf("expected no error, got %v", err) t.Errorf("expected no error, got %v", err)
} }
var contract = compiler.Contract{} var contracts = make(map[string]*compiler.Contract)
err = json.Unmarshal(respjson, &contract) err = json.Unmarshal(respjson, &contracts)
if err != nil { if err != nil {
t.Errorf("expected no error, got %v", err) t.Errorf("expected no error, got %v", err)
} }
/* if len(contracts) != 1 {
if contract.Code != expCode { t.Errorf("expected one contract, got %v", len(contracts))
t.Errorf("Expected %s got %s", expCode, contract.Code)
} }
*/
contract := contracts["test"]
if contract.Code != expCode {
t.Errorf("Expected \n%s got \n%s", expCode, contract.Code)
}
if strconv.Quote(contract.Info.Source) != `"`+expSource+`"` { if strconv.Quote(contract.Info.Source) != `"`+expSource+`"` {
t.Errorf("Expected \n'%s' got \n'%s'", expSource, strconv.Quote(contract.Info.Source)) t.Errorf("Expected \n'%s' got \n'%s'", expSource, strconv.Quote(contract.Info.Source))
} }
if contract.Info.Language != expLanguage { if contract.Info.Language != expLanguage {
t.Errorf("Expected %s got %s", expLanguage, contract.Info.Language) t.Errorf("Expected %s got %s", expLanguage, contract.Info.Language)
} }
if contract.Info.LanguageVersion != expLanguageVersion { if contract.Info.LanguageVersion != expLanguageVersion {
t.Errorf("Expected %s got %s", expLanguageVersion, contract.Info.LanguageVersion) t.Errorf("Expected %s got %s", expLanguageVersion, contract.Info.LanguageVersion)
} }
if contract.Info.CompilerVersion != expCompilerVersion { if contract.Info.CompilerVersion != expCompilerVersion {
t.Errorf("Expected %s got %s", expCompilerVersion, contract.Info.CompilerVersion) t.Errorf("Expected %s got %s", expCompilerVersion, contract.Info.CompilerVersion)
} }
@ -112,8 +120,6 @@ func TestCompileSolidity(t *testing.T) {
if string(abidef) != expAbiDefinition { if string(abidef) != expAbiDefinition {
t.Errorf("Expected \n'%s' got \n'%s'", expAbiDefinition, string(abidef)) t.Errorf("Expected \n'%s' got \n'%s'", expAbiDefinition, string(abidef))
} }
ioutil.WriteFile("/tmp/abidef", []byte(string(abidef)), 0700)
ioutil.WriteFile("/tmp/expabidef", []byte(expAbiDefinition), 0700)
if string(userdoc) != expUserDoc { if string(userdoc) != expUserDoc {
t.Errorf("Expected \n'%s' got \n'%s'", expUserDoc, string(userdoc)) t.Errorf("Expected \n'%s' got \n'%s'", expUserDoc, string(userdoc))


@ -166,45 +166,45 @@ type NewTxArgs struct {
BlockNumber int64 BlockNumber int64
} }
type NewSigArgs struct { // type NewSigArgs struct {
From string // From string
Data string // Data string
} // }
func (args *NewSigArgs) UnmarshalJSON(b []byte) (err error) { // func (args *NewSigArgs) UnmarshalJSON(b []byte) (err error) {
var obj []json.RawMessage // var obj []json.RawMessage
var ext struct { // var ext struct {
From string // From string
Data string // Data string
} // }
// Decode byte slice to array of RawMessages // // Decode byte slice to array of RawMessages
if err := json.Unmarshal(b, &obj); err != nil { // if err := json.Unmarshal(b, &obj); err != nil {
return NewDecodeParamError(err.Error()) // return NewDecodeParamError(err.Error())
} // }
// Check for sufficient params // // Check for sufficient params
if len(obj) < 1 { // if len(obj) < 1 {
return NewInsufficientParamsError(len(obj), 1) // return NewInsufficientParamsError(len(obj), 1)
} // }
// Decode 0th RawMessage to temporary struct // // Decode 0th RawMessage to temporary struct
if err := json.Unmarshal(obj[0], &ext); err != nil { // if err := json.Unmarshal(obj[0], &ext); err != nil {
return NewDecodeParamError(err.Error()) // return NewDecodeParamError(err.Error())
} // }
if len(ext.From) == 0 { // if len(ext.From) == 0 {
return NewValidationError("from", "is required") // return NewValidationError("from", "is required")
} // }
if len(ext.Data) == 0 { // if len(ext.Data) == 0 {
return NewValidationError("data", "is required") // return NewValidationError("data", "is required")
} // }
args.From = ext.From // args.From = ext.From
args.Data = ext.Data // args.Data = ext.Data
return nil // return nil
} // }
func (args *NewTxArgs) UnmarshalJSON(b []byte) (err error) { func (args *NewTxArgs) UnmarshalJSON(b []byte) (err error) {
var obj []json.RawMessage var obj []json.RawMessage


@ -6,6 +6,7 @@ import (
"io" "io"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
"strings"
"github.com/ethereum/go-ethereum/logger" "github.com/ethereum/go-ethereum/logger"
"github.com/ethereum/go-ethereum/logger/glog" "github.com/ethereum/go-ethereum/logger/glog"
@ -39,7 +40,7 @@ func Start(pipe *xeth.XEth, config RpcConfig) error {
if len(config.CorsDomain) > 0 { if len(config.CorsDomain) > 0 {
var opts cors.Options var opts cors.Options
opts.AllowedMethods = []string{"POST"} opts.AllowedMethods = []string{"POST"}
opts.AllowedOrigins = []string{config.CorsDomain} opts.AllowedOrigins = strings.Split(config.CorsDomain, " ")
c := cors.New(opts) c := cors.New(opts)
handler = newStoppableHandler(c.Handler(JSONRPC(pipe)), l.stop) handler = newStoppableHandler(c.Handler(JSONRPC(pipe)), l.stop)


@ -48,6 +48,10 @@ func TestBcTotalDifficulty(t *testing.T) {
runBlockTestsInFile("files/BlockTests/bcTotalDifficultyTest.json", []string{}, t) runBlockTestsInFile("files/BlockTests/bcTotalDifficultyTest.json", []string{}, t)
} }
func TestBcWallet(t *testing.T) {
runBlockTestsInFile("files/BlockTests/bcWalletTest.json", []string{}, t)
}
func runBlockTestsInFile(filepath string, snafus []string, t *testing.T) { func runBlockTestsInFile(filepath string, snafus []string, t *testing.T) {
bt, err := LoadBlockTests(filepath) bt, err := LoadBlockTests(filepath)
if err != nil { if err != nil {


@ -924,6 +924,440 @@
"value" : "0x0186a0" "value" : "0x0186a0"
} }
}, },
"createFailBalanceTooLow" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x0de0b6b3a7640017",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x715d",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7638e8c",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "5833e19631ddedaf4e3c9a766f696c2e59e7524e388eb871be19dc8e0ce37b6e",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x605a60005360016000670de0b6b3a7640018f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0xcf1d",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x17"
}
},
"createInitFailBadJumpDestination" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x0de0b6b3a76586a0",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x05f58340",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a16cf620",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "ccf7765eff3effe22a5f853099f7da88291b8346b689ffbf54b729ba04170e59",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x6056600053600160006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0x05f5e100",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x0186a0"
}
},
"createInitFailStackSizeLargerThan1024" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x0de0b6b3a76586a0",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x05f58340",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a16cf620",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "ccf7765eff3effe22a5f853099f7da88291b8346b689ffbf54b729ba04170e59",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x7f6103ff6000525b7f0102030405060708090a0102030405060708090a010203046000527f05060708090a0102600160005103600052600051600657000000000000000000602052604060006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0x05f5e100",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x0186a0"
}
},
"createInitFailStackUnderflow" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x0de0b6b3a76586a0",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x05f58340",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a16cf620",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "ccf7765eff3effe22a5f853099f7da88291b8346b689ffbf54b729ba04170e59",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x6001600053600160006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0x05f5e100",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x0186a0"
}
},
"createInitFailUndefinedInstruction" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x0de0b6b3a76586a0",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x05f58340",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a16cf620",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "ccf7765eff3effe22a5f853099f7da88291b8346b689ffbf54b729ba04170e59",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60f4600053600160006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0x05f5e100",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x0186a0"
}
},
"createInitFail_OOGduringInit" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x0de0b6b3a76586a0",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x715d",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7620803",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "297303455494578a5176177ff1b9db0b0a516255a3d062fb960bbc99e60d8eb5",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x605a600053600160006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0xcf1d",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x0186a0"
}
},
"createInitOOGforCREATE" : {
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x05f5e100",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"logs" : [
],
"out" : "0x",
"post" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x605a600053600160006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0xcf1c",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a76330e4",
"code" : "0x",
"nonce" : "0x01",
"storage" : {
}
}
},
"postStateRoot" : "b61e4a95fae40806b0ddef0883479c3db70e79e019ab4260535560827525c00c",
"pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x605a600053600160006001f0ff",
"nonce" : "0x00",
"storage" : {
}
},
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"transaction" : {
"data" : "",
"gasLimit" : "0xcf1c",
"gasPrice" : "0x01",
"nonce" : "0x00",
"secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8",
"to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value" : "0x0186a0"
}
},
"createJS_ExampleContract" : { "createJS_ExampleContract" : {
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",


@ -266,35 +266,35 @@
}, },
"logs" : [ "logs" : [
], ],
"out" : "0x00", "out" : "#4294967295",
"post" : { "post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
"code" : "0x6401000000016000f3", "code" : "0x63ffffffff6000f3",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }
}, },
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : { "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0x200018085211", "balance" : "0x20001800520e",
"code" : "0x", "code" : "0x",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }
}, },
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x1869ec3b06114f6f", "balance" : "0x1869ec3b06194f72",
"code" : "0x", "code" : "0x",
"nonce" : "0x01", "nonce" : "0x01",
"storage" : { "storage" : {
} }
} }
}, },
"postStateRoot" : "8745f6bdec4290420747b8c024382c6ed14e09f4a11718bdc1f0f99e4d04607b", "postStateRoot" : "1716bcf6c106040a46ab1d37c569b7e0841f921b4d252dc5202fb05388308a39",
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
"code" : "0x6401000000016000f3", "code" : "0x63ffffffff6000f3",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }
@ -328,11 +328,11 @@
}, },
"logs" : [ "logs" : [
], ],
"out" : "0x", "out" : "#4294967295",
"post" : { "post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
"code" : "0x60016000526401000000006000f3", "code" : "0x600160005263ffffffff6000f3",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }
@ -352,11 +352,11 @@
} }
} }
}, },
"postStateRoot" : "e6c6c5b997cf7ecbc653c920a5b42d1ddd9f9ca2df2c68fd47059df2a3309b14", "postStateRoot" : "b95676d3103fc4a6bdcfb1c201a6ed49e0fa7a239520a4b8ac034ce3b6c697eb",
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
"code" : "0x60016000526401000000006000f3", "code" : "0x600160005263ffffffff6000f3",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }

File diff suppressed because it is too large


@ -268,42 +268,50 @@
}, },
"logs" : [ "logs" : [
], ],
"out" : "0x0000000000000000000000000000000000000000000000000000000000000000", "out" : "0x0000000000000000000000000000000000000000000000000000000000000001",
"post" : { "post" : {
"0000000000000000000000000000000000000000" : {
"balance" : "0x00",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
},
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : { "095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0186a0", "balance" : "0x0186a0",
"code" : "0x6000357c0100000000000000000000000000000000000000000000000000000000900480633e0bca3b1461003a578063c04062261461004c57005b610042610099565b8060005260206000f35b61005461005e565b8060005260206000f35b6000610068610099565b600060006101000a81548160ff02191690830217905550600060009054906101000a900460ff169050610096565b90565b60006000600060019250825060018273ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f16100fd57005b505060005163ffffffff1614156101135761011c565b60009250610194565b60028173ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f161017457005b505060005163ffffffff16141561018a57610193565b60009250610194565b5b50509056", "code" : "0x7c010000000000000000000000000000000000000000000000000000000060003504633e0bca3b8114610039578063c0406226146100a857005b6100b55b600160008060456101ec8339604560006000f091508173ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f161011957005b6100bf60006100c961003d565b8060005260206000f35b8060005260206000f35b600080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0016919091179081905560ff16919050565b505060005163ffffffff166002141561019d575b5b505090565b505060005163ffffffff1660011415610194575b60456101a7600039604560006000f090508073ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f16100ff57005b60009250610114565b600092506101145600603980600c6000396000f3007c0100000000000000000000000000000000000000000000000000000000600035046381bda09b8114602d57005b60026000818152602090f3603980600c6000396000f3007c0100000000000000000000000000000000000000000000000000000000600035046381bda09b8114602d57005b60016000818152602090f3",
"nonce" : "0x00", "nonce" : "0x02",
"storage" : { "storage" : {
"0x00" : "0x01"
} }
}, },
"2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : { "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba" : {
"balance" : "0xca69", "balance" : "0x01f758",
"code" : "0x", "code" : "0x",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }
}, },
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : {
"balance" : "0x02fa2617", "balance" : "0x02f8f928",
"code" : "0x", "code" : "0x",
"nonce" : "0x01", "nonce" : "0x01",
"storage" : { "storage" : {
} }
},
"b88de88b35ecbf3c141e3caae2baf35834d18f63" : {
"balance" : "0x00",
"code" : "0x7c0100000000000000000000000000000000000000000000000000000000600035046381bda09b8114602d57005b60026000818152602090f3",
"nonce" : "0x00",
"storage" : {
} }
}, },
"postStateRoot" : "ed06b00015d227623175bd12a5d960281781493ea1d9fed79d6c40a20e2c6ef7", "d2571607e241ecf590ed94b12d87c94babe36db6" : {
"balance" : "0x00",
"code" : "0x7c0100000000000000000000000000000000000000000000000000000000600035046381bda09b8114602d57005b60016000818152602090f3",
"nonce" : "0x00",
"storage" : {
}
}
},
"postStateRoot" : "e7f84d674881d1cfd115be59e3e390271435c0b3474a482f7add54c3fe429d85",
"pre" : { "pre" : {
"095e7baea6a6c7c4c2dfeb977efac326af552d87" : { "095e7baea6a6c7c4c2dfeb977efac326af552d87" : {
"balance" : "0x0186a0", "balance" : "0x0186a0",
"code" : "0x6000357c0100000000000000000000000000000000000000000000000000000000900480633e0bca3b1461003a578063c04062261461004c57005b610042610099565b8060005260206000f35b61005461005e565b8060005260206000f35b6000610068610099565b600060006101000a81548160ff02191690830217905550600060009054906101000a900460ff169050610096565b90565b60006000600060019250825060018273ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f16100fd57005b505060005163ffffffff1614156101135761011c565b60009250610194565b60028173ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f161017457005b505060005163ffffffff16141561018a57610193565b60009250610194565b5b50509056", "code" : "0x7c010000000000000000000000000000000000000000000000000000000060003504633e0bca3b8114610039578063c0406226146100a857005b6100b55b600160008060456101ec8339604560006000f091508173ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f161011957005b6100bf60006100c961003d565b8060005260206000f35b8060005260206000f35b600080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0016919091179081905560ff16919050565b505060005163ffffffff166002141561019d575b5b505090565b505060005163ffffffff1660011415610194575b60456101a7600039604560006000f090508073ffffffffffffffffffffffffffffffffffffffff166381bda09b60206000827c010000000000000000000000000000000000000000000000000000000002600052600460006000866161da5a03f16100ff57005b60009250610114565b600092506101145600603980600c6000396000f3007c0100000000000000000000000000000000000000000000000000000000600035046381bda09b8114602d57005b60026000818152602090f3603980600c6000396000f3007c0100000000000000000000000000000000000000000000000000000000600035046381bda09b8114602d57005b60016000818152602090f3",
"nonce" : "0x00", "nonce" : "0x00",
"storage" : { "storage" : {
} }


@ -356,6 +356,8 @@
} }
}, },
"addmod1_overflow3" : { "addmod1_overflow3" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -369,11 +371,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60056001600160000308600055", "code" : "0x60056001600160000308600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0f4240",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x0ef406",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60056001600160000308600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x01"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -385,6 +401,8 @@
} }
}, },
"addmod1_overflow4" : { "addmod1_overflow4" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -398,11 +416,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60056002600160000308600055", "code" : "0x60056002600160000308600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0f4240",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x0ef406",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60056002600160000308600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x02"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -414,6 +446,8 @@
} }
}, },
"addmod1_overflowDiff" : { "addmod1_overflowDiff" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -427,11 +461,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60056002600003600160000308600055", "code" : "0x60056002600003600160000308600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0f4240",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x0ef400",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60056002600003600160000308600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x04"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -842,6 +890,51 @@
} }
} }
}, },
"addmodDivByZero3" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60016000600060000803600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013866",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60016000600060000803600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60016000600060000803600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"arith1" : { "arith1" : {
"callcreates" : [ "callcreates" : [
], ],
@ -1152,6 +1245,51 @@
} }
} }
}, },
"divByZero_2" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60076000600d0401600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x01386c",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60076000600d0401600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x07"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60076000600d0401600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"exp0" : { "exp0" : {
"callcreates" : [ "callcreates" : [
], ],
@ -5498,6 +5636,51 @@
} }
} }
}, },
"modByZero" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x6001600060030603600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x01386c",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x6001600060030603600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x6001600060030603600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"mul0" : { "mul0" : {
"callcreates" : [ "callcreates" : [
], ],
@ -6017,6 +6200,8 @@
} }
}, },
"mulmod1_overflow2" : { "mulmod1_overflow2" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -6030,11 +6215,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x600560027f800000000000000000000000000000000000000000000000000000000000000009600055", "code" : "0x600560027f800000000000000000000000000000000000000000000000000000000000000009600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0f4240",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x0ef40c",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600560027f800000000000000000000000000000000000000000000000000000000000000009600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x01"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -6046,6 +6245,8 @@
} }
}, },
"mulmod1_overflow3" : { "mulmod1_overflow3" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -6059,11 +6260,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x600560027f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff09600055", "code" : "0x600560027f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff09600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0f4240",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x0ef40c",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600560027f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff09600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x04"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -6075,6 +6290,8 @@
} }
}, },
"mulmod1_overflow4" : { "mulmod1_overflow4" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -6088,11 +6305,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x600560027f800000000000000000000000000000000000000000000000000000000000000109600055", "code" : "0x600560027f800000000000000000000000000000000000000000000000000000000000000109600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0f4240",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x0ef40c",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600560027f800000000000000000000000000000000000000000000000000000000000000109600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x03"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -6502,6 +6733,51 @@
} }
} }
}, },
"mulmoddivByZero3" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60006000600009600103600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013866",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60006000600009600103600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x01"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60006000600009600103600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"not1" : { "not1" : {
"callcreates" : [ "callcreates" : [
], ],
@ -7081,6 +7357,51 @@
} }
} }
}, },
"sdivByZero2" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x600160007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffcf923bdff6000030501600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013866",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600160007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffcf923bdff6000030501600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x01"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600160007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffcf923bdff6000030501600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"sdiv_dejavu" : { "sdiv_dejavu" : {
"callcreates" : [ "callcreates" : [
], ],
@ -8109,6 +8430,8 @@
} }
}, },
"smod6" : { "smod6" : {
"callcreates" : [
],
"env" : { "env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba", "currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100", "currentDifficulty" : "0x0100",
@ -8122,11 +8445,25 @@
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x7f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60000307600055", "code" : "0x7f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60000307600055",
"data" : "0x", "data" : "0x",
"gas" : "0x2710", "gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000", "gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000" "value" : "0x0de0b6b3a7640000"
}, },
"gas" : "0x01386c",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x7f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60000307600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x01"
}
}
},
"pre" : { "pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : { "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000", "balance" : "0x0de0b6b3a7640000",
@ -8181,6 +8518,140 @@
} }
} }
}, },
"smod8_byZero" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x600d600060c86000030703600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013866",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600d600060c86000030703600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff3"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600d600060c86000030703600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"smod_i256min1" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x60016000037f800000000000000000000000000000000000000000000000000000000000000060000307600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x0172fe",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60016000037f800000000000000000000000000000000000000000000000000000000000000060000307600055",
"nonce" : "0x00",
"storage" : {
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x60016000037f800000000000000000000000000000000000000000000000000000000000000060000307600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"smod_i256min2" : {
"callcreates" : [
],
"env" : {
"currentCoinbase" : "2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01",
"previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6"
},
"exec" : {
"address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"code" : "0x600160016000037f80000000000000000000000000000000000000000000000000000000000000006000030703600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "cd1722f2947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013860",
"logs" : [
],
"out" : "0x",
"post" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600160016000037f80000000000000000000000000000000000000000000000000000000000000006000030703600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
}
}
},
"pre" : {
"0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x0de0b6b3a7640000",
"code" : "0x600160016000037f80000000000000000000000000000000000000000000000000000000000000006000030703600055",
"nonce" : "0x00",
"storage" : {
}
}
}
},
"stop" : { "stop" : {
"callcreates" : [ "callcreates" : [
], ],


@ -4,6 +4,7 @@ import (
"bytes" "bytes"
"io/ioutil" "io/ioutil"
"math/big" "math/big"
"os"
"path/filepath" "path/filepath"
"strconv" "strconv"
"testing" "testing"
@ -87,7 +88,7 @@ func RunVmTest(p string, t *testing.T) {
vm.Debug = true vm.Debug = true
glog.SetV(4) glog.SetV(4)
glog.SetToStderr(true) glog.SetToStderr(true)
if name != "stackLimitPush32_1024" { if name != "Call50000_sha256" {
continue continue
} }
*/ */
@ -128,10 +129,16 @@ func RunVmTest(p string, t *testing.T) {
ret, logs, gas, err = helper.RunState(statedb, env, test.Transaction) ret, logs, gas, err = helper.RunState(statedb, env, test.Transaction)
} }
switch name {
// the memory required for these tests (4294967297 bytes) would take too much time to allocate.
// on 19 May 2015 it was decided to skip checking the output of these tests.
case "mload32bitBound_return", "mload32bitBound_return2":
default:
rexp := helper.FromHex(test.Out) rexp := helper.FromHex(test.Out)
if bytes.Compare(rexp, ret) != 0 { if bytes.Compare(rexp, ret) != 0 {
t.Errorf("%s's return failed. Expected %x, got %x\n", name, rexp, ret) t.Errorf("%s's return failed. Expected %x, got %x\n", name, rexp, ret)
} }
}
if isVmTest { if isVmTest {
if len(test.Gas) == 0 && err == nil { if len(test.Gas) == 0 && err == nil {
@ -246,8 +253,7 @@ func TestLogTest(t *testing.T) {
} }
func TestPerformance(t *testing.T) { func TestPerformance(t *testing.T) {
t.Skip() const fn = "../files/VMTests/vmPerformanceTest.json"
const fn = "../files/VMTests/vmPerformance.json"
RunVmTest(fn, t) RunVmTest(fn, t)
} }
@ -281,13 +287,13 @@ func TestInputLimitsLight(t *testing.T) {
RunVmTest(fn, t) RunVmTest(fn, t)
} }
func TestStateExample(t *testing.T) { func TestStateSystemOperations(t *testing.T) {
const fn = "../files/StateTests/stExample.json" const fn = "../files/StateTests/stSystemOperationsTest.json"
RunVmTest(fn, t) RunVmTest(fn, t)
} }
func TestStateSystemOperations(t *testing.T) { func TestStateExample(t *testing.T) {
const fn = "../files/StateTests/stSystemOperationsTest.json" const fn = "../files/StateTests/stExample.json"
RunVmTest(fn, t) RunVmTest(fn, t)
} }
@ -342,13 +348,17 @@ func TestMemory(t *testing.T) {
} }
func TestMemoryStress(t *testing.T) { func TestMemoryStress(t *testing.T) {
t.Skip("Skipped due to...consuming too much memory :D") if os.Getenv("TEST_VM_COMPLEX") == "" {
t.Skip()
}
const fn = "../files/StateTests/stMemoryStressTest.json" const fn = "../files/StateTests/stMemoryStressTest.json"
RunVmTest(fn, t) RunVmTest(fn, t)
} }
func TestQuadraticComplexity(t *testing.T) { func TestQuadraticComplexity(t *testing.T) {
t.Skip() // takes too long if os.Getenv("TEST_VM_COMPLEX") == "" {
t.Skip()
}
const fn = "../files/StateTests/stQuadraticComplexityTest.json" const fn = "../files/StateTests/stQuadraticComplexityTest.json"
RunVmTest(fn, t) RunVmTest(fn, t)
} }


@ -28,6 +28,7 @@ var (
filterTickerTime = 5 * time.Minute filterTickerTime = 5 * time.Minute
defaultGasPrice = big.NewInt(10000000000000) //150000000000 defaultGasPrice = big.NewInt(10000000000000) //150000000000
defaultGas = big.NewInt(90000) //500000 defaultGas = big.NewInt(90000) //500000
dappStorePre = []byte("dapp-")
) )
// byte will be inferred // byte will be inferred
@ -66,12 +67,16 @@ type XEth struct {
// regmut sync.Mutex // regmut sync.Mutex
// register map[string][]*interface{} // TODO improve return type // register map[string][]*interface{} // TODO improve return type
solcPath string
solc *compiler.Solidity
agent *miner.RemoteAgent agent *miner.RemoteAgent
} }
func NewTest(eth *eth.Ethereum, frontend Frontend) *XEth {
return &XEth{
backend: eth,
frontend: frontend,
}
}
// New creates an XEth that uses the given frontend. // New creates an XEth that uses the given frontend.
// If a nil Frontend is provided, a default frontend which // If a nil Frontend is provided, a default frontend which
// confirms all transactions will be used. // confirms all transactions will be used.
@ -304,6 +309,8 @@ func (self *XEth) EthBlockByHash(strHash string) *types.Block {
} }
func (self *XEth) EthTransactionByHash(hash string) (tx *types.Transaction, blhash common.Hash, blnum *big.Int, txi uint64) { func (self *XEth) EthTransactionByHash(hash string) (tx *types.Transaction, blhash common.Hash, blnum *big.Int, txi uint64) {
// Due to the growing number of return params and the need to determine whether the transaction
// came from the pool or the chain, this probably needs to be refactored for more expressiveness
data, _ := self.backend.ExtraDb().Get(common.FromHex(hash)) data, _ := self.backend.ExtraDb().Get(common.FromHex(hash))
if len(data) != 0 { if len(data) != 0 {
tx = types.NewTransactionFromBytes(data) tx = types.NewTransactionFromBytes(data)
@ -348,6 +355,24 @@ func (self *XEth) CurrentBlock() *types.Block {
return self.backend.ChainManager().CurrentBlock() return self.backend.ChainManager().CurrentBlock()
} }
func (self *XEth) GetBlockReceipts(bhash common.Hash) (receipts types.Receipts, err error) {
return self.backend.BlockProcessor().GetBlockReceipts(bhash)
}
func (self *XEth) GetTxReceipt(txhash common.Hash) (receipt *types.Receipt, err error) {
_, bhash, _, txi := self.EthTransactionByHash(common.ToHex(txhash[:]))
var receipts types.Receipts
receipts, err = self.backend.BlockProcessor().GetBlockReceipts(bhash)
if err == nil {
if txi < uint64(len(receipts)) {
receipt = receipts[txi]
} else {
err = fmt.Errorf("Invalid tx index")
}
}
return
}
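A rough usage sketch for the new receipt accessor (illustrative only; the hash below is a placeholder and the xeth variable is assumed to be an initialized *XEth):

	// Hypothetical lookup of a receipt by transaction hash.
	txhash := common.HexToHash("0x00") // placeholder hash, not from the diff
	receipt, err := xeth.GetTxReceipt(txhash)
	if err != nil || receipt == nil {
		// transaction unknown, or its index exceeds the block's receipt count
	}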
func (self *XEth) GasLimit() *big.Int { func (self *XEth) GasLimit() *big.Int {
return self.backend.ChainManager().GasLimit() return self.backend.ChainManager().GasLimit()
} }
@ -357,7 +382,7 @@ func (self *XEth) Block(v interface{}) *Block {
return self.BlockByNumber(int64(n)) return self.BlockByNumber(int64(n))
} else if str, ok := v.(string); ok { } else if str, ok := v.(string); ok {
return self.BlockByHash(str) return self.BlockByHash(str)
} else if f, ok := v.(float64); ok { // Don't ask ... } else if f, ok := v.(float64); ok { // JSON numbers are represented as float64
return self.BlockByNumber(int64(f)) return self.BlockByNumber(int64(f))
} }
@ -377,27 +402,24 @@ func (self *XEth) Accounts() []string {
// accessor for solidity compiler. // accessor for solidity compiler.
// memoized if available, retried on-demand if not // memoized if available, retried on-demand if not
func (self *XEth) Solc() (*compiler.Solidity, error) { func (self *XEth) Solc() (*compiler.Solidity, error) {
var err error return self.backend.Solc()
if self.solc == nil {
self.solc, err = compiler.New(self.solcPath)
}
return self.solc, err
} }
// set in js console via admin interface or wrapper from cli flags // set in js console via admin interface or wrapper from cli flags
func (self *XEth) SetSolc(solcPath string) (*compiler.Solidity, error) { func (self *XEth) SetSolc(solcPath string) (*compiler.Solidity, error) {
self.solcPath = solcPath self.backend.SetSolc(solcPath)
self.solc = nil
return self.Solc() return self.Solc()
} }
// store DApp value in extra database
func (self *XEth) DbPut(key, val []byte) bool { func (self *XEth) DbPut(key, val []byte) bool {
self.backend.ExtraDb().Put(key, val) self.backend.ExtraDb().Put(append(dappStorePre, key...), val)
return true return true
} }
// retrieve DApp value from extra database
func (self *XEth) DbGet(key []byte) ([]byte, error) { func (self *XEth) DbGet(key []byte) ([]byte, error) {
val, err := self.backend.ExtraDb().Get(key) val, err := self.backend.ExtraDb().Get(append(dappStorePre, key...))
return val, err return val, err
} }
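The practical effect of the new dapp prefix, sketched with made-up key and value names (xeth is again assumed to be an initialized *XEth):

	// A dapp key "settings" now lives under "dapp-settings" in extraDb,
	// so dapp entries cannot collide with other extraDb records (tx hashes, receipts, ...).
	xeth.DbPut([]byte("settings"), []byte("value")) // stored under "dapp-settings"
	val, _ := xeth.DbGet([]byte("settings"))        // read back from "dapp-settings"
	_ = val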
@ -778,7 +800,7 @@ func (self *XEth) PushTx(encodedTx string) (string, error) {
} }
func (self *XEth) Call(fromStr, toStr, valueStr, gasStr, gasPriceStr, dataStr string) (string, string, error) { func (self *XEth) Call(fromStr, toStr, valueStr, gasStr, gasPriceStr, dataStr string) (string, string, error) {
statedb := self.State().State().Copy() //self.eth.ChainManager().TransState() statedb := self.State().State().Copy()
var from *state.StateObject var from *state.StateObject
if len(fromStr) == 0 { if len(fromStr) == 0 {
accounts, err := self.backend.AccountManager().Accounts() accounts, err := self.backend.AccountManager().Accounts()
@ -862,13 +884,14 @@ func (self *XEth) Transact(fromStr, toStr, nonceStr, valueStr, gasStr, gasPriceS
var ( var (
from = common.HexToAddress(fromStr) from = common.HexToAddress(fromStr)
to = common.HexToAddress(toStr) to = common.HexToAddress(toStr)
value = common.NewValue(valueStr) value = common.Big(valueStr)
gas = common.Big(gasStr) gas = common.Big(gasStr)
price = common.Big(gasPriceStr) price = common.Big(gasPriceStr)
data []byte data []byte
contractCreation bool contractCreation bool
) )
// 2015-05-18 Is this still needed?
// TODO if no_private_key then // TODO if no_private_key then
//if _, exists := p.register[args.From]; exists { //if _, exists := p.register[args.From]; exists {
// p.register[args.From] = append(p.register[args.From], args) // p.register[args.From] = append(p.register[args.From], args)
@ -908,9 +931,9 @@ func (self *XEth) Transact(fromStr, toStr, nonceStr, valueStr, gasStr, gasPriceS
var tx *types.Transaction var tx *types.Transaction
if contractCreation { if contractCreation {
tx = types.NewContractCreationTx(value.BigInt(), gas, price, data) tx = types.NewContractCreationTx(value, gas, price, data)
} else { } else {
tx = types.NewTransactionMessage(to, value.BigInt(), gas, price, data) tx = types.NewTransactionMessage(to, value, gas, price, data)
} }
state := self.backend.ChainManager().TxState() state := self.backend.ChainManager().TxState()
@ -924,9 +947,11 @@ func (self *XEth) Transact(fromStr, toStr, nonceStr, valueStr, gasStr, gasPriceS
tx.SetNonce(nonce) tx.SetNonce(nonce)
if err := self.sign(tx, from, false); err != nil { if err := self.sign(tx, from, false); err != nil {
state.RemoveNonce(from, tx.Nonce())
return "", err return "", err
} }
if err := self.backend.TxPool().Add(tx); err != nil { if err := self.backend.TxPool().Add(tx); err != nil {
state.RemoveNonce(from, tx.Nonce())
return "", err return "", err
} }