Compare commits: v1.9.7...verkle/fix
1767 commits

(Commit table: 1767 rows; only the SHA1 hashes were captured, without author, date, or commit message.)

.circleci/config.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
+# Use the latest 2.1 version of CircleCI pipeline process engine.
+# See: https://circleci.com/docs/2.0/configuration-reference
+version: 2.1
+
+# Define a job to be invoked later in a workflow.
+# See: https://circleci.com/docs/2.0/configuration-reference/#jobs
+jobs:
+  build:
+    working_directory: ~/repo
+    # Specify the execution environment. You can specify an image from Dockerhub or use one of our Convenience Images from CircleCI's Developer Hub.
+    # See: https://circleci.com/docs/2.0/configuration-reference/#docker-machine-macos-windows-executor
+    docker:
+      - image: circleci/golang:1.16.10
+    # Add steps to the job
+    # See: https://circleci.com/docs/2.0/configuration-reference/#steps
+    steps:
+      - checkout
+      - restore_cache:
+          keys:
+            - go-mod-v4-{{ checksum "go.sum" }}
+      - run:
+          name: Install Dependencies
+          command: go mod download
+      - save_cache:
+          key: go-mod-v4-{{ checksum "go.sum" }}
+          paths:
+            - "/go/pkg/mod"
+      #- run:
+      #    name: Run linter
+      #    command: |
+      #      go run build/ci.go lint
+      - run:
+          name: Run tests
+          command: |
+            go run build/ci.go test -coverage
+      - store_test_results:
+          path: /tmp/test-reports
+
+# Invoke jobs via workflows
+# See: https://circleci.com/docs/2.0/configuration-reference/#workflows
+workflows:
+  sample: # This is the name of the workflow, feel free to change it to better match your workflow.
+    # Inside the workflow, you define the jobs you want to run.
+    jobs:
+      - build

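The restore_cache/save_cache pair above keys the Go module cache on a digest of go.sum, so the cache is only rebuilt when dependencies change. As a rough sketch of that idea in Go (this is not CircleCI's actual `checksum` template, whose exact digest encoding is not shown here; the file name and key prefix simply mirror the config above):

```go
// checksum_key.go (illustrative only): derives a cache key from go.sum in the
// spirit of the go-mod-v4-{{ checksum "go.sum" }} template above. Any change
// to dependencies changes go.sum and therefore the key, so restore_cache
// misses and save_cache writes a fresh entry. How CircleCI encodes the digest
// internally is not assumed here; a hex SHA-256 is used for illustration.
package main

import (
	"crypto/sha256"
	"fmt"
	"os"
)

func main() {
	data, err := os.ReadFile("go.sum") // assumes it runs in the module root
	if err != nil {
		fmt.Fprintln(os.Stderr, "read go.sum:", err)
		os.Exit(1)
	}
	fmt.Printf("go-mod-v4-%x\n", sha256.Sum256(data))
}
```
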
.github/CODEOWNERS (vendored, 12 changed lines)
@@ -3,21 +3,21 @@
 
 accounts/usbwallet @karalabe
 accounts/scwallet @gballet
-accounts/abi @gballet
+accounts/abi @gballet @MariusVanDerWijden
 cmd/clef @holiman
 cmd/puppeth @karalabe
 consensus @karalabe
 core/ @karalabe @holiman @rjl493456442
-dashboard/ @kurkomisi
 eth/ @karalabe @holiman @rjl493456442
+eth/catalyst/ @gballet
 graphql/ @gballet
 les/ @zsfelfoldi @rjl493456442
 light/ @zsfelfoldi @rjl493456442
 mobile/ @karalabe @ligi
+node/ @fjl @renaynay
 p2p/ @fjl @zsfelfoldi
 rpc/ @fjl @holiman
-p2p/simulations @zelig @nonsense @janos @justelad
-p2p/protocols @zelig @nonsense @janos @justelad
-p2p/testing @zelig @nonsense @janos @justelad
+p2p/simulations @fjl
+p2p/protocols @fjl
+p2p/testing @fjl
 signer/ @holiman
-whisper/ @gballet @gluk256

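The CODEOWNERS changes above reassign review duties (for example, `eth/catalyst/` to @gballet and `node/` to @fjl and @renaynay). For illustration only, here is a simplified Go sketch of parsing CODEOWNERS-style lines into a pattern-to-owners map; GitHub's real matching also applies glob patterns and last-match-wins precedence, which this sketch does not implement:

```go
// codeowners.go (hypothetical sketch): split CODEOWNERS-style lines into a
// map from path pattern to owner handles. Comments and blank lines are
// skipped; no glob matching or rule precedence is attempted here.
package main

import (
	"bufio"
	"fmt"
	"strings"
)

func parseCodeowners(input string) map[string][]string {
	owners := make(map[string][]string)
	sc := bufio.NewScanner(strings.NewReader(input))
	for sc.Scan() {
		line := strings.TrimSpace(sc.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue // skip blanks and comments
		}
		fields := strings.Fields(line)
		owners[fields[0]] = fields[1:] // pattern -> @handles
	}
	return owners
}

func main() {
	sample := "eth/catalyst/ @gballet\nnode/ @fjl @renaynay\n"
	for pattern, handles := range parseCodeowners(sample) {
		fmt.Println(pattern, "->", handles)
	}
}
```
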
.github/CONTRIBUTING.md (vendored, 4 changed lines)
@@ -30,11 +30,11 @@ Please make sure your contributions adhere to our coding guidelines:
 Before you submit a feature request, please check and make sure that it isn't
 possible through some other means. The JavaScript-enabled console is a powerful
 feature in the right hands. Please check our
-[Wiki page](https://github.com/ethereum/go-ethereum/wiki) for more info
+[Geth documentation page](https://geth.ethereum.org/docs/) for more info
 and help.
 
 ## Configuration, dependencies, and tests
 
-Please see the [Developers' Guide](https://github.com/ethereum/go-ethereum/wiki/Developers'-Guide)
+Please see the [Developers' Guide](https://geth.ethereum.org/docs/developers/devguide)
 for more details on configuring your environment, managing project dependencies
 and testing procedures.

.github/ISSUE_TEMPLATE.md (vendored, deleted file, 26 lines)
@@ -1,26 +0,0 @@
-Hi there,
-
-Please note that this is an issue tracker reserved for bug reports and feature requests.
-
-For general questions please use the gitter channel or the Ethereum stack exchange at https://ethereum.stackexchange.com.
-
-#### System information
-
-Geth version: `geth version`
-OS & Version: Windows/Linux/OSX
-Commit hash : (if `develop`)
-
-#### Expected behaviour
-
-
-#### Actual behaviour
-
-
-#### Steps to reproduce the behaviour
-
-
-#### Backtrace
-
-````
-[backtrace]
-````

.github/ISSUE_TEMPLATE/bug.md (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
+---
+name: Report a bug
+about: Something with go-ethereum is not working as expected
+title: ''
+labels: 'type:bug'
+assignees: ''
+---
+
+#### System information
+
+Geth version: `geth version`
+OS & Version: Windows/Linux/OSX
+Commit hash : (if `develop`)
+
+#### Expected behaviour
+
+
+#### Actual behaviour
+
+
+#### Steps to reproduce the behaviour
+
+
+#### Backtrace
+
+````
+[backtrace]
+````
+
+When submitting logs: please submit them as text and not screenshots.

.github/ISSUE_TEMPLATE/feature.md (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
+---
+name: Request a feature
+about: Report a missing feature - e.g. as a step before submitting a PR
+title: ''
+labels: 'type:feature'
+assignees: ''
+---
+
+# Rationale
+
+Why should this feature exist?
+What are the use-cases?
+
+# Implementation
+
+Do you have ideas regarding the implementation of this feature?
+Are you willing to implement this feature?

.github/ISSUE_TEMPLATE/question.md (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
+---
+name: Ask a question
+about: Something is unclear
+title: ''
+labels: 'type:docs'
+assignees: ''
+---
+
+This should only be used in very rare cases e.g. if you are not 100% sure if something is a bug or asking a question that leads to improving the documentation. For general questions please use [discord](https://discord.gg/nthXNEv) or the Ethereum stack exchange at https://ethereum.stackexchange.com.

.gitignore (vendored, 1 changed line)
@@ -24,6 +24,7 @@ build/_vendor/pkg
 
 # used by the Makefile
 /build/_workspace/
+/build/cache/
 /build/bin/
 /geth*.zip
 

.gitmodules (vendored, 1 changed line)
@@ -1,3 +1,4 @@
 [submodule "tests"]
   path = tests/testdata
   url = https://github.com/ethereum/tests
+  shallow = true

.golangci.yml (new file, 50 lines)
@@ -0,0 +1,50 @@
+# This file configures github.com/golangci/golangci-lint.
+
+run:
+  timeout: 5m
+  tests: true
+  # default is true. Enables skipping of directories:
+  #   vendor$, third_party$, testdata$, examples$, Godeps$, builtin$
+  skip-dirs-use-default: true
+  skip-files:
+    - core/genesis_alloc.go
+
+linters:
+  disable-all: true
+  enable:
+    - deadcode
+    - goconst
+    - goimports
+    - gosimple
+    - govet
+    - ineffassign
+    - misspell
+    # - staticcheck
+    - unconvert
+    # - unused
+    - varcheck
+
+linters-settings:
+  gofmt:
+    simplify: true
+  goconst:
+    min-len: 3 # minimum length of string constant
+    min-occurrences: 6 # minimum number of occurrences
+
+issues:
+  exclude-rules:
+    - path: crypto/blake2b/
+      linters:
+        - deadcode
+    - path: crypto/bn256/cloudflare
+      linters:
+        - deadcode
+    - path: p2p/discv5/
+      linters:
+        - deadcode
+    - path: core/vm/instructions_test.go
+      linters:
+        - goconst
+    - path: cmd/faucet/
+      linters:
+        - deadcode

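To illustrate what the linters enabled above report, here is a small hypothetical Go snippet that two of them would flag: ineffassign (the initial value assigned to `label` is never used before being overwritten) and unconvert (the `int(n)` conversion is redundant). It is an example of lint findings, not code from the repository:

```go
// lintbait.go (hypothetical example): code that the linters enabled in
// .golangci.yml would flag. It compiles, but golangci-lint reports:
//   - ineffassign: ineffectual assignment to label (the "unknown" value is
//     immediately overwritten)
//   - unconvert: unnecessary conversion int(n), since n is already an int
package main

import "fmt"

func describe(n int) string {
	label := "unknown"               // flagged by ineffassign
	label = fmt.Sprintf("%d", int(n)) // int(n) flagged by unconvert
	return label
}

func main() {
	fmt.Println(describe(1))
	fmt.Println(describe(2))
}
```
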
.travis.yml (235 changed lines)
@@ -2,12 +2,21 @@ language: go
 go_import_path: github.com/ethereum/go-ethereum
 sudo: false
 jobs:
+  allow_failures:
+    - stage: build
+      os: osx
+      go: 1.17.x
+      env:
+        - azure-osx
+        - azure-ios
+        - cocoapods-ios
+
   include:
     # This builder only tests code linters on latest version of Go
     - stage: lint
       os: linux
-      dist: xenial
-      go: 1.13.x
+      dist: bionic
+      go: 1.17.x
       env:
         - lint
       git:
@@ -15,55 +24,51 @@ jobs:
       script:
         - go run build/ci.go lint
 
+    # These builders create the Docker sub-images for multi-arch push and each
+    # will attempt to push the multi-arch image if they are the last builder
     - stage: build
+      if: type = push
       os: linux
-      dist: xenial
-      go: 1.11.x
+      arch: amd64
+      dist: bionic
+      go: 1.17.x
+      env:
+        - docker
+      services:
+        - docker
+      git:
+        submodules: false # avoid cloning ethereum/tests
+      before_install:
+        - export DOCKER_CLI_EXPERIMENTAL=enabled
       script:
-        - go run build/ci.go install
-        - go run build/ci.go test -coverage $TEST_PACKAGES
+        - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go
 
     - stage: build
+      if: type = push
       os: linux
-      dist: xenial
-      go: 1.12.x
+      arch: arm64
+      dist: bionic
+      go: 1.17.x
+      env:
+        - docker
+      services:
+        - docker
+      git:
+        submodules: false # avoid cloning ethereum/tests
+      before_install:
+        - export DOCKER_CLI_EXPERIMENTAL=enabled
       script:
-        - go run build/ci.go install
-        - go run build/ci.go test -coverage $TEST_PACKAGES
+        - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go
 
-    # These are the latest Go versions.
-    - stage: build
-      os: linux
-      dist: xenial
-      go: 1.13.x
-      script:
-        - go run build/ci.go install
-        - go run build/ci.go test -coverage $TEST_PACKAGES
-
-    - stage: build
-      os: osx
-      go: 1.13.x
-      script:
-        - echo "Increase the maximum number of open file descriptors on macOS"
-        - NOFILE=20480
-        - sudo sysctl -w kern.maxfiles=$NOFILE
-        - sudo sysctl -w kern.maxfilesperproc=$NOFILE
-        - sudo launchctl limit maxfiles $NOFILE $NOFILE
-        - sudo launchctl limit maxfiles
-        - ulimit -S -n $NOFILE
-        - ulimit -n
-        - unset -f cd # workaround for https://github.com/travis-ci/travis-ci/issues/8703
-        - go run build/ci.go install
-        - go run build/ci.go test -coverage $TEST_PACKAGES
-
     # This builder does the Ubuntu PPA upload
     - stage: build
       if: type = push
       os: linux
-      dist: xenial
-      go: 1.13.x
+      dist: bionic
+      go: 1.17.x
       env:
         - ubuntu-ppa
+        - GO111MODULE=on
       git:
         submodules: false # avoid cloning ethereum/tests
       addons:
@@ -75,22 +80,20 @@ jobs:
             - fakeroot
             - python-bzrlib
             - python-paramiko
-      cache:
-        directories:
-          - $HOME/.gobundle
       script:
         - echo '|1|7SiYPr9xl3uctzovOTj4gMwAC1M=|t6ReES75Bo/PxlOPJ6/GsGbTrM0= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA0aKz5UTUndYgIGG7dQBV+HaeuEZJ2xPHo2DS2iSKvUL4xNMSAY4UguNW+pX56nAQmZKIZZ8MaEvSj6zMEDiq6HFfn5JcTlM80UwlnyKe8B8p7Nk06PPQLrnmQt5fh0HmEcZx+JU9TZsfCHPnX7MNz4ELfZE6cFsclClrKim3BHUIGq//t93DllB+h4O9LHjEUsQ1Sr63irDLSutkLJD6RXchjROXkNirlcNVHH/jwLWR5RcYilNX7S5bIkK8NlWPjsn/8Ua5O7I9/YoE97PpO6i73DTGLh5H9JN/SITwCKBkgSDWUt61uPK3Y11Gty7o2lWsBjhBUm2Y38CBsoGmBw==' >> ~/.ssh/known_hosts
-        - go run build/ci.go debsrc -upload ethereum/ethereum -sftp-user geth-ci -signer "Go Ethereum Linux Builder <geth-ci@ethereum.org>" -goversion 1.13.4 -gohash 95dbeab442ee2746b9acf0934c8e2fc26414a0565c008631b04addb8c02e7624 -gobundle $HOME/.gobundle/go.tar.gz
+        - go run build/ci.go debsrc -upload ethereum/ethereum -sftp-user geth-ci -signer "Go Ethereum Linux Builder <geth-ci@ethereum.org>"
 
     # This builder does the Linux Azure uploads
     - stage: build
       if: type = push
       os: linux
-      dist: xenial
+      dist: bionic
       sudo: required
-      go: 1.13.x
+      go: 1.17.x
       env:
         - azure-linux
+        - GO111MODULE=on
       git:
         submodules: false # avoid cloning ethereum/tests
       addons:
@@ -99,105 +102,77 @@ jobs:
             - gcc-multilib
       script:
         # Build for the primary platforms that Trusty can manage
-        - go run build/ci.go install
-        - go run build/ci.go archive -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-        - go run build/ci.go install -arch 386
-        - go run build/ci.go archive -arch 386 -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
+        - go run build/ci.go install -dlgo
+        - go run build/ci.go archive -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+        - go run build/ci.go install -dlgo -arch 386
+        - go run build/ci.go archive -arch 386 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
 
         # Switch over GCC to cross compilation (breaks 386, hence why do it here only)
         - sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install gcc-arm-linux-gnueabi libc6-dev-armel-cross gcc-arm-linux-gnueabihf libc6-dev-armhf-cross gcc-aarch64-linux-gnu libc6-dev-arm64-cross
         - sudo ln -s /usr/include/asm-generic /usr/include/asm
 
-        - GOARM=5 go run build/ci.go install -arch arm -cc arm-linux-gnueabi-gcc
-        - GOARM=5 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-        - GOARM=6 go run build/ci.go install -arch arm -cc arm-linux-gnueabi-gcc
-        - GOARM=6 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-        - GOARM=7 go run build/ci.go install -arch arm -cc arm-linux-gnueabihf-gcc
-        - GOARM=7 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-        - go run build/ci.go install -arch arm64 -cc aarch64-linux-gnu-gcc
-        - go run build/ci.go archive -arch arm64 -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
+        - GOARM=5 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc
+        - GOARM=5 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+        - GOARM=6 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc
+        - GOARM=6 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+        - GOARM=7 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabihf-gcc
+        - GOARM=7 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+        - go run build/ci.go install -dlgo -arch arm64 -cc aarch64-linux-gnu-gcc
+        - go run build/ci.go archive -arch arm64 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
 
-    # This builder does the Linux Azure MIPS xgo uploads
-    - stage: build
-      if: type = push
-      os: linux
-      dist: xenial
-      services:
-        - docker
-      go: 1.13.x
-      env:
-        - azure-linux-mips
-      git:
-        submodules: false # avoid cloning ethereum/tests
-      script:
-        - go run build/ci.go xgo --alltools -- --targets=linux/mips --ldflags '-extldflags "-static"' -v
-        - for bin in build/bin/*-linux-mips; do mv -f "${bin}" "${bin/-linux-mips/}"; done
-        - go run build/ci.go archive -arch mips -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-
-        - go run build/ci.go xgo --alltools -- --targets=linux/mipsle --ldflags '-extldflags "-static"' -v
-        - for bin in build/bin/*-linux-mipsle; do mv -f "${bin}" "${bin/-linux-mipsle/}"; done
-        - go run build/ci.go archive -arch mipsle -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-
-        - go run build/ci.go xgo --alltools -- --targets=linux/mips64 --ldflags '-extldflags "-static"' -v
-        - for bin in build/bin/*-linux-mips64; do mv -f "${bin}" "${bin/-linux-mips64/}"; done
-        - go run build/ci.go archive -arch mips64 -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-
-        - go run build/ci.go xgo --alltools -- --targets=linux/mips64le --ldflags '-extldflags "-static"' -v
-        - for bin in build/bin/*-linux-mips64le; do mv -f "${bin}" "${bin/-linux-mips64le/}"; done
-        - go run build/ci.go archive -arch mips64le -type tar -signer LINUX_SIGNING_KEY -upload gethstore/builds
-
     # This builder does the Android Maven and Azure uploads
     - stage: build
       if: type = push
       os: linux
-      dist: xenial
+      dist: bionic
       addons:
         apt:
           packages:
-            - oracle-java8-installer
-            - oracle-java8-set-default
-      language: android
-      android:
-        components:
-          - platform-tools
-          - tools
-          - android-15
-          - android-19
-          - android-24
+            - openjdk-8-jdk
       env:
         - azure-android
        - maven-android
+        - GO111MODULE=on
       git:
         submodules: false # avoid cloning ethereum/tests
       before_install:
-        - curl https://dl.google.com/go/go1.13.linux-amd64.tar.gz | tar -xz
+        # Install Android and it's dependencies manually, Travis is stale
+        - export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+        - curl https://dl.google.com/android/repository/commandlinetools-linux-6858069_latest.zip -o android.zip
+        - unzip -q android.zip -d $HOME/sdk && rm android.zip
+        - mv $HOME/sdk/cmdline-tools $HOME/sdk/latest && mkdir $HOME/sdk/cmdline-tools && mv $HOME/sdk/latest $HOME/sdk/cmdline-tools
+        - export PATH=$PATH:$HOME/sdk/cmdline-tools/latest/bin
+        - export ANDROID_HOME=$HOME/sdk
+
+        - yes | sdkmanager --licenses >/dev/null
+        - sdkmanager "platform-tools" "platforms;android-15" "platforms;android-19" "platforms;android-24" "ndk-bundle"
+
+        # Install Go to allow building with
+        - curl https://dl.google.com/go/go1.16.linux-amd64.tar.gz | tar -xz
         - export PATH=`pwd`/go/bin:$PATH
         - export GOROOT=`pwd`/go
         - export GOPATH=$HOME/go
       script:
         # Build the Android archive and upload it to Maven Central and Azure
-        - curl https://dl.google.com/android/repository/android-ndk-r19b-linux-x86_64.zip -o android-ndk-r19b.zip
-        - unzip -q android-ndk-r19b.zip && rm android-ndk-r19b.zip
-        - mv android-ndk-r19b $ANDROID_HOME/ndk-bundle
-
         - mkdir -p $GOPATH/src/github.com/ethereum
         - ln -s `pwd` $GOPATH/src/github.com/ethereum/go-ethereum
-        - go run build/ci.go aar -signer ANDROID_SIGNING_KEY -deploy https://oss.sonatype.org -upload gethstore/builds
+        - go run build/ci.go aar -signer ANDROID_SIGNING_KEY -signify SIGNIFY_KEY -deploy https://oss.sonatype.org -upload gethstore/builds
 
     # This builder does the OSX Azure, iOS CocoaPods and iOS Azure uploads
     - stage: build
       if: type = push
       os: osx
-      go: 1.13.x
+      go: 1.17.x
       env:
         - azure-osx
         - azure-ios
         - cocoapods-ios
+        - GO111MODULE=on
       git:
         submodules: false # avoid cloning ethereum/tests
       script:
-        - go run build/ci.go install
-        - go run build/ci.go archive -type tar -signer OSX_SIGNING_KEY -upload gethstore/builds
+        - go run build/ci.go install -dlgo
+        - go run build/ci.go archive -type tar -signer OSX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
 
         # Build the iOS framework and upload it to CocoaPods and Azure
         - gem uninstall cocoapods -a -x
@@ -212,17 +187,61 @@ jobs:
 
         # Workaround for https://github.com/golang/go/issues/23749
         - export CGO_CFLAGS_ALLOW='-fmodules|-fblocks|-fobjc-arc'
-        - go run build/ci.go xcode -signer IOS_SIGNING_KEY -deploy trunk -upload gethstore/builds
+        - go run build/ci.go xcode -signer IOS_SIGNING_KEY -signify SIGNIFY_KEY -deploy trunk -upload gethstore/builds
 
+    # These builders run the tests
+    - stage: build
+      os: linux
+      arch: amd64
+      dist: bionic
+      go: 1.17.x
+      env:
+        - GO111MODULE=on
+      script:
+        - go run build/ci.go test -coverage $TEST_PACKAGES
+
+    - stage: build
+      if: type = pull_request
+      os: linux
+      arch: arm64
+      dist: bionic
+      go: 1.17.x
+      env:
+        - GO111MODULE=on
+      script:
+        - go run build/ci.go test -coverage $TEST_PACKAGES
+
+    - stage: build
+      os: linux
+      dist: bionic
+      go: 1.16.x
+      env:
+        - GO111MODULE=on
+      script:
+        - go run build/ci.go test -coverage $TEST_PACKAGES
+
     # This builder does the Azure archive purges to avoid accumulating junk
     - stage: build
       if: type = cron
       os: linux
-      dist: xenial
-      go: 1.13.x
+      dist: bionic
+      go: 1.17.x
       env:
         - azure-purge
+        - GO111MODULE=on
       git:
         submodules: false # avoid cloning ethereum/tests
       script:
         - go run build/ci.go purge -store gethstore/builds -days 14
+
+    # This builder executes race tests
+    - stage: build
+      if: type = cron
+      os: linux
+      dist: bionic
+      go: 1.17.x
+      env:
+        - GO111MODULE=on
+      script:
+        - go run build/ci.go test -race -coverage $TEST_PACKAGES
+

COPYING  (59 changed lines)
@@ -1,7 +1,7 @@
 GNU GENERAL PUBLIC LICENSE
 Version 3, 29 June 2007

-Copyright (C) 2014 The go-ethereum Authors.
+Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

@@ -616,4 +616,59 @@ above cannot be given local legal effect according to their terms,
 reviewing courts shall apply local law that most closely approximates
 an absolute waiver of all civil liability in connection with the
 Program, unless a warranty or assumption of liability accompanies a
 copy of the Program in return for a fee.

+END OF TERMS AND CONDITIONS
+
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
Dockerfile  (22 changed lines)
@@ -1,10 +1,15 @@
-# Build Geth in a stock Go builder container
-FROM golang:1.13-alpine as builder
+# Support setting various labels on the final image
+ARG COMMIT=""
+ARG VERSION=""
+ARG BUILDNUM=""
+
-RUN apk add --no-cache make gcc musl-dev linux-headers git
+# Build Geth in a stock Go builder container
+FROM golang:1.17-alpine as builder
+
+RUN apk add --no-cache gcc musl-dev linux-headers git

 ADD . /go-ethereum
-RUN cd /go-ethereum && make geth
+RUN cd /go-ethereum && go run build/ci.go install ./cmd/geth

 # Pull Geth into a second stage deploy alpine container
 FROM alpine:latest

@@ -12,5 +17,12 @@ FROM alpine:latest
 RUN apk add --no-cache ca-certificates
 COPY --from=builder /go-ethereum/build/bin/geth /usr/local/bin/

-EXPOSE 8545 8546 8547 30303 30303/udp
+EXPOSE 8545 8546 30303 30303/udp
 ENTRYPOINT ["geth"]
+
+# Add some metadata labels to help programatic image consumption
+ARG COMMIT=""
+ARG VERSION=""
+ARG BUILDNUM=""
+
+LABEL commit="$COMMIT" version="$VERSION" buildnum="$BUILDNUM"
Dockerfile.alltools
@@ -1,10 +1,15 @@
-# Build Geth in a stock Go builder container
-FROM golang:1.13-alpine as builder
+# Support setting various labels on the final image
+ARG COMMIT=""
+ARG VERSION=""
+ARG BUILDNUM=""
+
-RUN apk add --no-cache make gcc musl-dev linux-headers git
+# Build Geth in a stock Go builder container
+FROM golang:1.17-alpine as builder
+
+RUN apk add --no-cache gcc musl-dev linux-headers git

 ADD . /go-ethereum
-RUN cd /go-ethereum && make all
+RUN cd /go-ethereum && go run build/ci.go install

 # Pull all binaries into a second stage deploy alpine container
 FROM alpine:latest

@@ -12,4 +17,11 @@ FROM alpine:latest
 RUN apk add --no-cache ca-certificates
 COPY --from=builder /go-ethereum/build/bin/* /usr/local/bin/

-EXPOSE 8545 8546 8547 30303 30303/udp
+EXPOSE 8545 8546 30303 30303/udp
+
+# Add some metadata labels to help programatic image consumption
+ARG COMMIT=""
+ARG VERSION=""
+ARG BUILDNUM=""
+
+LABEL commit="$COMMIT" version="$VERSION" buildnum="$BUILDNUM"
Makefile  (124 changed lines)
@@ -2,144 +2,50 @@
 # with Go source code. If you know what GOPATH is then you probably
 # don't need to bother with make.

-.PHONY: geth android ios geth-cross evm all test clean
-.PHONY: geth-linux geth-linux-386 geth-linux-amd64 geth-linux-mips64 geth-linux-mips64le
-.PHONY: geth-linux-arm geth-linux-arm-5 geth-linux-arm-6 geth-linux-arm-7 geth-linux-arm64
-.PHONY: geth-darwin geth-darwin-386 geth-darwin-amd64
-.PHONY: geth-windows geth-windows-386 geth-windows-amd64
+.PHONY: geth android ios evm all test clean

 GOBIN = ./build/bin
 GO ?= latest
+GORUN = env GO111MODULE=on go run

 geth:
-	build/env.sh go run build/ci.go install ./cmd/geth
+	$(GORUN) build/ci.go install ./cmd/geth
 	@echo "Done building."
 	@echo "Run \"$(GOBIN)/geth\" to launch geth."

 all:
-	build/env.sh go run build/ci.go install
+	$(GORUN) build/ci.go install

 android:
-	build/env.sh go run build/ci.go aar --local
+	$(GORUN) build/ci.go aar --local
 	@echo "Done building."
 	@echo "Import \"$(GOBIN)/geth.aar\" to use the library."
+	@echo "Import \"$(GOBIN)/geth-sources.jar\" to add javadocs"
+	@echo "For more info see https://stackoverflow.com/questions/20994336/android-studio-how-to-attach-javadoc"

 ios:
-	build/env.sh go run build/ci.go xcode --local
+	$(GORUN) build/ci.go xcode --local
 	@echo "Done building."
 	@echo "Import \"$(GOBIN)/Geth.framework\" to use the library."

 test: all
-	build/env.sh go run build/ci.go test
+	$(GORUN) build/ci.go test

 lint: ## Run linters.
-	build/env.sh go run build/ci.go lint
+	$(GORUN) build/ci.go lint

 clean:
-	./build/clean_go_build_cache.sh
+	env GO111MODULE=on go clean -cache
 	rm -fr build/_workspace/pkg/ $(GOBIN)/*

 # The devtools target installs tools required for 'go generate'.
 # You need to put $GOBIN (or $GOPATH/bin) in your PATH to use 'go generate'.

 devtools:
-	env GOBIN= go get -u golang.org/x/tools/cmd/stringer
+	env GOBIN= go install golang.org/x/tools/cmd/stringer@latest
-	env GOBIN= go get -u github.com/kevinburke/go-bindata/go-bindata
+	env GOBIN= go install github.com/kevinburke/go-bindata/go-bindata@latest
-	env GOBIN= go get -u github.com/fjl/gencodec
+	env GOBIN= go install github.com/fjl/gencodec@latest
-	env GOBIN= go get -u github.com/golang/protobuf/protoc-gen-go
+	env GOBIN= go install github.com/golang/protobuf/protoc-gen-go@latest
 	env GOBIN= go install ./cmd/abigen
-	@type "npm" 2> /dev/null || echo 'Please install node.js and npm'
 	@type "solc" 2> /dev/null || echo 'Please install solc'
 	@type "protoc" 2> /dev/null || echo 'Please install protoc'

-# Cross Compilation Targets (xgo)
-
-geth-cross: geth-linux geth-darwin geth-windows geth-android geth-ios
-	@echo "Full cross compilation done:"
-	@ls -ld $(GOBIN)/geth-*
-
-geth-linux: geth-linux-386 geth-linux-amd64 geth-linux-arm geth-linux-mips64 geth-linux-mips64le
-	@echo "Linux cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-*
-
-geth-linux-386:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/386 -v ./cmd/geth
-	@echo "Linux 386 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep 386
-
-geth-linux-amd64:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/amd64 -v ./cmd/geth
-	@echo "Linux amd64 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep amd64
-
-geth-linux-arm: geth-linux-arm-5 geth-linux-arm-6 geth-linux-arm-7 geth-linux-arm64
-	@echo "Linux ARM cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep arm
-
-geth-linux-arm-5:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/arm-5 -v ./cmd/geth
-	@echo "Linux ARMv5 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep arm-5
-
-geth-linux-arm-6:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/arm-6 -v ./cmd/geth
-	@echo "Linux ARMv6 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep arm-6
-
-geth-linux-arm-7:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/arm-7 -v ./cmd/geth
-	@echo "Linux ARMv7 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep arm-7
-
-geth-linux-arm64:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/arm64 -v ./cmd/geth
-	@echo "Linux ARM64 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep arm64
-
-geth-linux-mips:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/mips --ldflags '-extldflags "-static"' -v ./cmd/geth
-	@echo "Linux MIPS cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep mips
-
-geth-linux-mipsle:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/mipsle --ldflags '-extldflags "-static"' -v ./cmd/geth
-	@echo "Linux MIPSle cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep mipsle
-
-geth-linux-mips64:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/mips64 --ldflags '-extldflags "-static"' -v ./cmd/geth
-	@echo "Linux MIPS64 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep mips64
-
-geth-linux-mips64le:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=linux/mips64le --ldflags '-extldflags "-static"' -v ./cmd/geth
-	@echo "Linux MIPS64le cross compilation done:"
-	@ls -ld $(GOBIN)/geth-linux-* | grep mips64le
-
-geth-darwin: geth-darwin-386 geth-darwin-amd64
-	@echo "Darwin cross compilation done:"
-	@ls -ld $(GOBIN)/geth-darwin-*
-
-geth-darwin-386:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=darwin/386 -v ./cmd/geth
-	@echo "Darwin 386 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-darwin-* | grep 386
-
-geth-darwin-amd64:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=darwin/amd64 -v ./cmd/geth
-	@echo "Darwin amd64 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-darwin-* | grep amd64
-
-geth-windows: geth-windows-386 geth-windows-amd64
-	@echo "Windows cross compilation done:"
-	@ls -ld $(GOBIN)/geth-windows-*
-
-geth-windows-386:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=windows/386 -v ./cmd/geth
-	@echo "Windows 386 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-windows-* | grep 386
-
-geth-windows-amd64:
-	build/env.sh go run build/ci.go xgo -- --go=$(GO) --targets=windows/amd64 -v ./cmd/geth
-	@echo "Windows amd64 cross compilation done:"
-	@ls -ld $(GOBIN)/geth-windows-* | grep amd64
README.md  (115 changed lines)
@@ -4,9 +4,9 @@ Official Golang implementation of the Ethereum protocol.

-[](https://godoc.org/github.com/ethereum/go-ethereum)
+[](https://pkg.go.dev/github.com/ethereum/go-ethereum?tab=doc)
 [](https://goreportcard.com/report/github.com/ethereum/go-ethereum)
-[](https://travis-ci.org/ethereum/go-ethereum)
+[](https://travis-ci.com/ethereum/go-ethereum)
 [](https://discord.gg/nthXNEv)

 Automated builds are available for stable releases and the unstable master branch. Binary

@@ -14,9 +14,9 @@ archives are published at https://geth.ethereum.org/downloads/.

 ## Building the source

-For prerequisites and detailed build instructions please read the [Installation Instructions](https://github.com/ethereum/go-ethereum/wiki/Building-Ethereum) on the wiki.
+For prerequisites and detailed build instructions please read the [Installation Instructions](https://geth.ethereum.org/docs/install-and-build/installing-geth).

-Building `geth` requires both a Go (version 1.10 or later) and a C compiler. You can install
+Building `geth` requires both a Go (version 1.14 or later) and a C compiler. You can install
 them using your favourite package manager. Once the dependencies are installed, run

 ```shell

@@ -36,18 +36,19 @@ directory.

 | Command | Description |
 | :-----------: | ----------- |
-| **`geth`** | Our main Ethereum CLI client. It is the entry point into the Ethereum network (main-, test- or private net), capable of running as a full node (default), archive node (retaining all historical state) or a light node (retrieving data live). It can be used by other processes as a gateway into the Ethereum network via JSON RPC endpoints exposed on top of HTTP, WebSocket and/or IPC transports. `geth --help` and the [CLI Wiki page](https://github.com/ethereum/go-ethereum/wiki/Command-Line-Options) for command line options. |
+| **`geth`** | Our main Ethereum CLI client. It is the entry point into the Ethereum network (main-, test- or private net), capable of running as a full node (default), archive node (retaining all historical state) or a light node (retrieving data live). It can be used by other processes as a gateway into the Ethereum network via JSON RPC endpoints exposed on top of HTTP, WebSocket and/or IPC transports. `geth --help` and the [CLI page](https://geth.ethereum.org/docs/interface/command-line-options) for command line options. |
-| `abigen` | Source code generator to convert Ethereum contract definitions into easy to use, compile-time type-safe Go packages. It operates on plain [Ethereum contract ABIs](https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI) with expanded functionality if the contract bytecode is also available. However, it also accepts Solidity source files, making development much more streamlined. Please see our [Native DApps](https://github.com/ethereum/go-ethereum/wiki/Native-DApps:-Go-bindings-to-Ethereum-contracts) wiki page for details. |
+| `clef` | Stand-alone signing tool, which can be used as a backend signer for `geth`. |
+| `devp2p` | Utilities to interact with nodes on the networking layer, without running a full blockchain. |
+| `abigen` | Source code generator to convert Ethereum contract definitions into easy to use, compile-time type-safe Go packages. It operates on plain [Ethereum contract ABIs](https://docs.soliditylang.org/en/develop/abi-spec.html) with expanded functionality if the contract bytecode is also available. However, it also accepts Solidity source files, making development much more streamlined. Please see our [Native DApps](https://geth.ethereum.org/docs/dapp/native-bindings) page for details. |
 | `bootnode` | Stripped down version of our Ethereum client implementation that only takes part in the network node discovery protocol, but does not run any of the higher level application protocols. It can be used as a lightweight bootstrap node to aid in finding peers in private networks. |
-| `evm` | Developer utility version of the EVM (Ethereum Virtual Machine) that is capable of running bytecode snippets within a configurable environment and execution mode. Its purpose is to allow isolated, fine-grained debugging of EVM opcodes (e.g. `evm --code 60ff60ff --debug`). |
+| `evm` | Developer utility version of the EVM (Ethereum Virtual Machine) that is capable of running bytecode snippets within a configurable environment and execution mode. Its purpose is to allow isolated, fine-grained debugging of EVM opcodes (e.g. `evm --code 60ff60ff --debug run`). |
-| `gethrpctest` | Developer utility tool to support our [ethereum/rpc-test](https://github.com/ethereum/rpc-tests) test suite which validates baseline conformity to the [Ethereum JSON RPC](https://github.com/ethereum/wiki/wiki/JSON-RPC) specs. Please see the [test suite's readme](https://github.com/ethereum/rpc-tests/blob/master/README.md) for details. |
-| `rlpdump` | Developer utility tool to convert binary RLP ([Recursive Length Prefix](https://github.com/ethereum/wiki/wiki/RLP)) dumps (data encoding used by the Ethereum protocol both network as well as consensus wise) to user-friendlier hierarchical representation (e.g. `rlpdump --hex CE0183FFFFFFC4C304050583616263`). |
+| `rlpdump` | Developer utility tool to convert binary RLP ([Recursive Length Prefix](https://eth.wiki/en/fundamentals/rlp)) dumps (data encoding used by the Ethereum protocol both network as well as consensus wise) to user-friendlier hierarchical representation (e.g. `rlpdump --hex CE0183FFFFFFC4C304050583616263`). |
 | `puppeth` | a CLI wizard that aids in creating a new Ethereum network. |

 ## Running `geth`

 Going through all the possible command line flags is out of scope here (please consult our
-[CLI Wiki page](https://github.com/ethereum/go-ethereum/wiki/Command-Line-Options)),
+[CLI Wiki page](https://geth.ethereum.org/docs/interface/command-line-options)),
 but we've enumerated a few common parameter combos to get you up to speed quickly
 on how you can run your own `geth` instance.

@@ -63,16 +64,17 @@ $ geth console
 ```

 This command will:
- * Start `geth` in fast sync mode (default, can be changed with the `--syncmode` flag),
+ * Start `geth` in snap sync mode (default, can be changed with the `--syncmode` flag),
   causing it to download more data in exchange for avoiding processing the entire history
   of the Ethereum network, which is very CPU intensive.
- * Start up `geth`'s built-in interactive [JavaScript console](https://github.com/ethereum/go-ethereum/wiki/JavaScript-Console),
+ * Start up `geth`'s built-in interactive [JavaScript console](https://geth.ethereum.org/docs/interface/javascript-console),
-   (via the trailing `console` subcommand) through which you can invoke all official [`web3` methods](https://github.com/ethereum/wiki/wiki/JavaScript-API)
+   (via the trailing `console` subcommand) through which you can interact using [`web3` methods](https://web3js.readthedocs.io/)
-   as well as `geth`'s own [management APIs](https://github.com/ethereum/go-ethereum/wiki/Management-APIs).
+   (note: the `web3` version bundled within `geth` is very old, and not up to date with official docs),
+   as well as `geth`'s own [management APIs](https://geth.ethereum.org/docs/rpc/server).
   This tool is optional and if you leave it out you can always attach to an already running
   `geth` instance with `geth attach`.

-### A Full node on the Ethereum test network
+### A Full node on the Görli test network

 Transitioning towards developers, if you'd like to play around with creating Ethereum
 contracts, you almost certainly would like to do that without any real money involved until

@@ -81,23 +83,24 @@ network, you want to join the **test** network with your node, which is fully eq
 the main network, but with play-Ether only.

 ```shell
-$ geth --testnet console
+$ geth --goerli console
 ```

 The `console` subcommand has the exact same meaning as above and they are equally
-useful on the testnet too. Please see above for their explanations if you've skipped here.
+useful on the testnet too. Please, see above for their explanations if you've skipped here.

-Specifying the `--testnet` flag, however, will reconfigure your `geth` instance a bit:
+Specifying the `--goerli` flag, however, will reconfigure your `geth` instance a bit:

+ * Instead of connecting the main Ethereum network, the client will connect to the Görli
+   test network, which uses different P2P bootnodes, different network IDs and genesis
+   states.
 * Instead of using the default data directory (`~/.ethereum` on Linux for example), `geth`
-   will nest itself one level deeper into a `testnet` subfolder (`~/.ethereum/testnet` on
+   will nest itself one level deeper into a `goerli` subfolder (`~/.ethereum/goerli` on
   Linux). Note, on OSX and Linux this also means that attaching to a running testnet node
   requires the use of a custom endpoint since `geth attach` will try to attach to a
-   production node endpoint by default. E.g.
+   production node endpoint by default, e.g.,
-   `geth attach <datadir>/testnet/geth.ipc`. Windows users are not affected by
+   `geth attach <datadir>/goerli/geth.ipc`. Windows users are not affected by
   this.
- * Instead of connecting the main Ethereum network, the client will connect to the test
-   network, which uses different P2P bootnodes, different network IDs and genesis states.

 *Note: Although there are some internal protective measures to prevent transactions from
 crossing over between the main network and test network, you should make sure to always

@@ -107,17 +110,26 @@ accounts available between them.*

 ### Full node on the Rinkeby test network

-The above test network is a cross-client one based on the ethash proof-of-work consensus
-algorithm. As such, it has certain extra overhead and is more susceptible to reorganization
-attacks due to the network's low difficulty/security. Go Ethereum also supports connecting
-to a proof-of-authority based test network called [*Rinkeby*](https://www.rinkeby.io)
-(operated by members of the community). This network is lighter, more secure, but is only
-supported by go-ethereum.
+Go Ethereum also supports connecting to the older proof-of-authority based test network
+called [*Rinkeby*](https://www.rinkeby.io) which is operated by members of the community.

 ```shell
 $ geth --rinkeby console
 ```

+### Full node on the Ropsten test network
+
+In addition to Görli and Rinkeby, Geth also supports the ancient Ropsten testnet. The
+Ropsten test network is based on the Ethash proof-of-work consensus algorithm. As such,
+it has certain extra overhead and is more susceptible to reorganization attacks due to the
+network's low difficulty/security.
+
+```shell
+$ geth --ropsten console
+```
+
+*Note: Older Geth configurations store the Ropsten database in the `testnet` subdirectory.*

 ### Configuration

 As an alternative to passing the numerous flags to the `geth` binary, you can also pass a

@@ -152,7 +164,7 @@ above command does. It will also create a persistent volume in your home direct
 saving your blockchain as well as map the default ports. There is also an `alpine` tag
 available for a slim version of the image.

-Do not forget `--rpcaddr 0.0.0.0`, if you want to access RPC from other containers
+Do not forget `--http.addr 0.0.0.0`, if you want to access RPC from other containers
 and/or hosts. By default, `geth` binds to the local interface and RPC endpoints is not
 accessible from the outside.

@@ -160,8 +172,8 @@ accessible from the outside.

 As a developer, sooner rather than later you'll want to start interacting with `geth` and the
 Ethereum network via your own programs and not manually through the console. To aid
-this, `geth` has built-in support for a JSON-RPC based APIs ([standard APIs](https://github.com/ethereum/wiki/wiki/JSON-RPC)
+this, `geth` has built-in support for a JSON-RPC based APIs ([standard APIs](https://eth.wiki/json-rpc/API)
-and [`geth` specific APIs](https://github.com/ethereum/go-ethereum/wiki/Management-APIs)).
+and [`geth` specific APIs](https://geth.ethereum.org/docs/rpc/server)).
 These can be exposed via HTTP, WebSockets and IPC (UNIX sockets on UNIX based
 platforms, and named pipes on Windows).

@@ -172,16 +184,16 @@ you'd expect.

 HTTP based JSON-RPC API options:

-  * `--rpc` Enable the HTTP-RPC server
+  * `--http` Enable the HTTP-RPC server
-  * `--rpcaddr` HTTP-RPC server listening interface (default: `localhost`)
+  * `--http.addr` HTTP-RPC server listening interface (default: `localhost`)
-  * `--rpcport` HTTP-RPC server listening port (default: `8545`)
+  * `--http.port` HTTP-RPC server listening port (default: `8545`)
-  * `--rpcapi` API's offered over the HTTP-RPC interface (default: `eth,net,web3`)
+  * `--http.api` API's offered over the HTTP-RPC interface (default: `eth,net,web3`)
-  * `--rpccorsdomain` Comma separated list of domains from which to accept cross origin requests (browser enforced)
+  * `--http.corsdomain` Comma separated list of domains from which to accept cross origin requests (browser enforced)
   * `--ws` Enable the WS-RPC server
-  * `--wsaddr` WS-RPC server listening interface (default: `localhost`)
+  * `--ws.addr` WS-RPC server listening interface (default: `localhost`)
-  * `--wsport` WS-RPC server listening port (default: `8546`)
+  * `--ws.port` WS-RPC server listening port (default: `8546`)
-  * `--wsapi` API's offered over the WS-RPC interface (default: `eth,net,web3`)
+  * `--ws.api` API's offered over the WS-RPC interface (default: `eth,net,web3`)
-  * `--wsorigins` Origins from which to accept websockets requests
+  * `--ws.origins` Origins from which to accept websockets requests
   * `--ipcdisable` Disable the IPC-RPC server
   * `--ipcapi` API's offered over the IPC-RPC interface (default: `admin,debug,eth,miner,net,personal,shh,txpool,web3`)
   * `--ipcpath` Filename for IPC socket/pipe within the datadir (explicit paths escape it)
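The flags above only expose the RPC endpoints. As a hedged illustration of consuming them from Go with go-ethereum's own `ethclient` package, the sketch below connects to a node started with `--http` on the default port; the endpoint URL `http://localhost:8545` is an assumption and should be adjusted to wherever your node actually listens.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	// Dial the HTTP-RPC endpoint exposed by `geth --http` (assumed local, default port).
	client, err := ethclient.Dial("http://localhost:8545")
	if err != nil {
		log.Fatalf("failed to connect to geth: %v", err)
	}
	defer client.Close()

	// Ask for the chain ID and the latest header over the standard eth namespace.
	chainID, err := client.ChainID(context.Background())
	if err != nil {
		log.Fatalf("failed to fetch chain id: %v", err)
	}
	header, err := client.HeaderByNumber(context.Background(), nil) // nil means "latest"
	if err != nil {
		log.Fatalf("failed to fetch latest header: %v", err)
	}
	fmt.Println("chain id:", chainID, "latest block:", header.Number)
}
```

The same code works against a WebSocket (`ws://...`) or IPC endpoint as well; only the URL handed to `Dial` changes.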
@@ -217,7 +229,10 @@ aware of and agree upon. This consists of a small JSON file (e.g. call it `genes
     "eip158Block": 0,
     "byzantiumBlock": 0,
     "constantinopleBlock": 0,
-    "petersburgBlock": 0
+    "petersburgBlock": 0,
+    "istanbulBlock": 0,
+    "berlinBlock": 0,
+    "londonBlock": 0
   },
   "alloc": {},
   "coinbase": "0x0000000000000000000000000000000000000000",
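As a rough, non-authoritative sketch of working with the genesis layout touched by this hunk, the snippet below parses a minimal spec with go-ethereum's `core.Genesis` type before it would be handed to `geth init`. The chain ID, difficulty and gas limit are made-up placeholder values, not values taken from this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/core"
)

// A minimal private-network genesis spec; all numbers are illustrative only.
const genesisJSON = `{
  "config": {
    "chainId": 1337,
    "homesteadBlock": 0,
    "eip150Block": 0,
    "eip155Block": 0,
    "eip158Block": 0,
    "byzantiumBlock": 0,
    "constantinopleBlock": 0,
    "petersburgBlock": 0,
    "istanbulBlock": 0,
    "berlinBlock": 0,
    "londonBlock": 0
  },
  "alloc": {},
  "coinbase": "0x0000000000000000000000000000000000000000",
  "difficulty": "0x20000",
  "gasLimit": "0x2fefd8"
}`

func main() {
	// core.Genesis decodes the same JSON layout that `geth init` consumes,
	// so this is a cheap way to sanity-check a hand-written spec.
	var genesis core.Genesis
	if err := json.Unmarshal([]byte(genesisJSON), &genesis); err != nil {
		log.Fatalf("invalid genesis spec: %v", err)
	}
	fmt.Println("chain id:", genesis.Config.ChainID, "gas limit:", genesis.GasLimit)
}
```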
@@ -266,7 +281,7 @@ $ bootnode --genkey=boot.key
 $ bootnode --nodekey=boot.key
 ```

-With the bootnode online, it will display an [`enode` URL](https://github.com/ethereum/wiki/wiki/enode-url-format)
+With the bootnode online, it will display an [`enode` URL](https://eth.wiki/en/fundamentals/enode-url-format)
 that other nodes can use to connect to it and exchange peer information. Make sure to
 replace the displayed IP address information (most probably `[::]`) with your externally
 accessible IP to get the actual `enode` URL.

@@ -294,7 +309,7 @@ also need to configure a miner to process transactions and create new blocks for
 Mining on the public Ethereum network is a complex task as it's only feasible using GPUs,
 requiring an OpenCL or CUDA enabled `ethminer` instance. For information on such a
 setup, please consult the [EtherMining subreddit](https://www.reddit.com/r/EtherMining/)
-and the [Genoil miner](https://github.com/Genoil/cpp-ethereum) repository.
+and the [ethminer](https://github.com/ethereum-mining/ethminer) repository.

 In a private network setting, however a single CPU miner instance is more than enough for
 practical purposes as it can produce a stable stream of blocks at the correct intervals

@@ -303,13 +318,13 @@ ones either). To start a `geth` instance for mining, run it with all your usual
 by:

 ```shell
-$ geth <usual-flags> --mine --miner.threads=1 --etherbase=0x0000000000000000000000000000000000000000
+$ geth <usual-flags> --mine --miner.threads=1 --miner.etherbase=0x0000000000000000000000000000000000000000
 ```

 Which will start mining blocks and transactions on a single CPU thread, crediting all
-proceedings to the account specified by `--etherbase`. You can further tune the mining
+proceedings to the account specified by `--miner.etherbase`. You can further tune the mining
-by changing the default gas limit blocks converge to (`--targetgaslimit`) and the price
+by changing the default gas limit blocks converge to (`--miner.targetgaslimit`) and the price
-transactions are accepted at (`--gasprice`).
+transactions are accepted at (`--miner.gasprice`).

 ## Contribution

@@ -318,7 +333,7 @@ from anyone on the internet, and are grateful for even the smallest of fixes!

 If you'd like to contribute to go-ethereum, please fork, fix, commit and send a pull request
 for the maintainers to review and merge into the main code base. If you wish to submit
-more complex changes though, please check up with the core devs first on [our gitter channel](https://gitter.im/ethereum/go-ethereum)
+more complex changes though, please check up with the core devs first on [our Discord Server](https://discord.gg/invite/nthXNEv)
 to ensure those changes are in line with the general philosophy of the project and/or get
 some early feedback which can make both your efforts much lighter as well as our review
 and merge procedures quick and simple.

@@ -333,7 +348,7 @@ Please make sure your contributions adhere to our coding guidelines:
 * Commit messages should be prefixed with the package(s) they modify.
   * E.g. "eth, rpc: make trace configs optional"

-Please see the [Developers' Guide](https://github.com/ethereum/go-ethereum/wiki/Developers'-Guide)
+Please see the [Developers' Guide](https://geth.ethereum.org/docs/developers/devguide)
 for more details on configuring your environment, managing project dependencies, and
 testing procedures.
SECURITY.md  (12 changed lines)
@@ -2,31 +2,31 @@

 ## Supported Versions

-Please see Releases. We recommend to use the most recent released version.
+Please see [Releases](https://github.com/ethereum/go-ethereum/releases). We recommend using the [most recently released version](https://github.com/ethereum/go-ethereum/releases/latest).

 ## Audit reports

 Audit reports are published in the `docs` folder: https://github.com/ethereum/go-ethereum/tree/master/docs/audits

 | Scope | Date | Report Link |
 | ------- | ------- | ----------- |
 | `geth` | 20170425 | [pdf](https://github.com/ethereum/go-ethereum/blob/master/docs/audits/2017-04-25_Geth-audit_Truesec.pdf) |
 | `clef` | 20180914 | [pdf](https://github.com/ethereum/go-ethereum/blob/master/docs/audits/2018-09-14_Clef-audit_NCC.pdf) |
+| `Discv5` | 20191015 | [pdf](https://github.com/ethereum/go-ethereum/blob/master/docs/audits/2019-10-15_Discv5_audit_LeastAuthority.pdf) |
+| `Discv5` | 20200124 | [pdf](https://github.com/ethereum/go-ethereum/blob/master/docs/audits/2020-01-24_DiscV5_audit_Cure53.pdf) |

 ## Reporting a Vulnerability

 **Please do not file a public ticket** mentioning the vulnerability.

-To find out how to disclose a vulnerability in Ethereum visit [https://bounty.ethereum.org](https://bounty.ethereum.org) or email bounty@ethereum.org.
+To find out how to disclose a vulnerability in Ethereum visit [https://bounty.ethereum.org](https://bounty.ethereum.org) or email bounty@ethereum.org. Please read the [disclosure page](https://github.com/ethereum/go-ethereum/security/advisories?state=published) for more information about publically disclosed security vulnerabilities.
+
+Use the built-in `geth version-check` feature to check whether the software is affected by any known vulnerability. This command will fetch the latest [`vulnerabilities.json`](https://geth.ethereum.org/docs/vulnerabilities/vulnerabilities.json) file which contains known security vulnerabilities concerning `geth`, and cross-check the data against its own version number.

 The following key may be used to communicate sensitive information to developers.

 Fingerprint: `AE96 ED96 9E47 9B00 84F3 E17F E88D 3334 FA5F 6A0A`

 ```
 -----BEGIN PGP PUBLIC KEY BLOCK-----
 Version: GnuPG v1
accounts/abi/abi.go
@@ -19,10 +19,12 @@ package abi
 import (
 	"bytes"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"

 	"github.com/ethereum/go-ethereum/common"
+	"github.com/ethereum/go-ethereum/crypto"
 )

 // The ABI holds information about a contract's context and available

@@ -32,6 +34,13 @@ type ABI struct {
 	Constructor Method
 	Methods     map[string]Method
 	Events      map[string]Event
+	Errors      map[string]Error
+
+	// Additional "special" functions introduced in solidity v0.6.0.
+	// It's separated from the original default fallback. Each contract
+	// can only define one fallback and receive function.
+	Fallback Method // Note it's also used to represent legacy fallback before v0.6.0
+	Receive  Method
 }

 // JSON returns a parsed ABI interface and error if it failed.

@@ -42,7 +51,6 @@ func JSON(reader io.Reader) (ABI, error) {
 	if err := dec.Decode(&abi); err != nil {
 		return ABI{}, err
 	}
-
 	return abi, nil
 }

@@ -70,104 +78,131 @@ func (abi ABI) Pack(name string, args ...interface{}) ([]byte, error) {
 		return nil, err
 	}
 	// Pack up the method ID too if not a constructor and return
-	return append(method.ID(), arguments...), nil
+	return append(method.ID, arguments...), nil
 }

-// Unpack output in v according to the abi specification
-func (abi ABI) Unpack(v interface{}, name string, data []byte) (err error) {
+func (abi ABI) getArguments(name string, data []byte) (Arguments, error) {
 	// since there can't be naming collisions with contracts and events,
 	// we need to decide whether we're calling a method or an event
+	var args Arguments
 	if method, ok := abi.Methods[name]; ok {
 		if len(data)%32 != 0 {
-			return fmt.Errorf("abi: improperly formatted output: %s - Bytes: [%+v]", string(data), data)
+			return nil, fmt.Errorf("abi: improperly formatted output: %s - Bytes: [%+v]", string(data), data)
 		}
-		return method.Outputs.Unpack(v, data)
+		args = method.Outputs
 	}
 	if event, ok := abi.Events[name]; ok {
-		return event.Inputs.Unpack(v, data)
+		args = event.Inputs
 	}
-	return fmt.Errorf("abi: could not locate named method or event")
+	if args == nil {
+		return nil, errors.New("abi: could not locate named method or event")
+	}
+	return args, nil
 }

-// UnpackIntoMap unpacks a log into the provided map[string]interface{}
+// Unpack unpacks the output according to the abi specification.
+func (abi ABI) Unpack(name string, data []byte) ([]interface{}, error) {
+	args, err := abi.getArguments(name, data)
+	if err != nil {
+		return nil, err
+	}
+	return args.Unpack(data)
+}
+
+// UnpackIntoInterface unpacks the output in v according to the abi specification.
+// It performs an additional copy. Please only use, if you want to unpack into a
+// structure that does not strictly conform to the abi structure (e.g. has additional arguments)
+func (abi ABI) UnpackIntoInterface(v interface{}, name string, data []byte) error {
+	args, err := abi.getArguments(name, data)
+	if err != nil {
+		return err
+	}
+	unpacked, err := args.Unpack(data)
+	if err != nil {
+		return err
+	}
+	return args.Copy(v, unpacked)
+}
+
+// UnpackIntoMap unpacks a log into the provided map[string]interface{}.
 func (abi ABI) UnpackIntoMap(v map[string]interface{}, name string, data []byte) (err error) {
-	// since there can't be naming collisions with contracts and events,
-	// we need to decide whether we're calling a method or an event
-	if method, ok := abi.Methods[name]; ok {
-		if len(data)%32 != 0 {
-			return fmt.Errorf("abi: improperly formatted output")
-		}
-		return method.Outputs.UnpackIntoMap(v, data)
+	args, err := abi.getArguments(name, data)
+	if err != nil {
+		return err
 	}
-	if event, ok := abi.Events[name]; ok {
-		return event.Inputs.UnpackIntoMap(v, data)
-	}
-	return fmt.Errorf("abi: could not locate named method or event")
+	return args.UnpackIntoMap(v, data)
 }
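For context on how the reworked unpacking API above is called, here is a hedged usage sketch; the one-method ABI definition and the hand-built return data are invented purely for illustration.

```go
package main

import (
	"fmt"
	"log"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

// A made-up contract ABI with a single view method returning a uint256.
const def = `[{"type":"function","name":"balance","stateMutability":"view","inputs":[],"outputs":[{"name":"","type":"uint256"}]}]`

func main() {
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		log.Fatal(err)
	}
	// Pack produces the 4-byte selector (plus arguments, none here) for a call.
	input, err := parsed.Pack("balance")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("calldata: %x\n", input)

	// Pretend this is the raw return data of the call: a single ABI-encoded uint256.
	ret := common.LeftPadBytes(big.NewInt(42).Bytes(), 32)

	// Unpack returns the decoded outputs as a []interface{}.
	out, err := parsed.Unpack("balance", ret)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("balance:", out[0].(*big.Int))

	// UnpackIntoInterface instead copies into a caller-provided value.
	var balance *big.Int
	if err := parsed.UnpackIntoInterface(&balance, "balance", ret); err != nil {
		log.Fatal(err)
	}
	fmt.Println("balance again:", balance)
}
```

The split mirrors the design above: `Unpack` hands back generic values, while `UnpackIntoInterface` performs the extra copy into caller-owned types.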
// UnmarshalJSON implements json.Unmarshaler interface
|
// UnmarshalJSON implements json.Unmarshaler interface.
|
||||||
func (abi *ABI) UnmarshalJSON(data []byte) error {
|
func (abi *ABI) UnmarshalJSON(data []byte) error {
|
||||||
var fields []struct {
|
var fields []struct {
|
||||||
Type string
|
Type string
|
||||||
Name string
|
Name string
|
||||||
Constant bool
|
Inputs []Argument
|
||||||
|
Outputs []Argument
|
||||||
|
|
||||||
|
// Status indicator which can be: "pure", "view",
|
||||||
|
// "nonpayable" or "payable".
|
||||||
|
StateMutability string
|
||||||
|
|
||||||
|
// Deprecated Status indicators, but removed in v0.6.0.
|
||||||
|
Constant bool // True if function is either pure or view
|
||||||
|
Payable bool // True if function is payable
|
||||||
|
|
||||||
|
// Event relevant indicator represents the event is
|
||||||
|
// declared as anonymous.
|
||||||
Anonymous bool
|
Anonymous bool
|
||||||
Inputs []Argument
|
|
||||||
Outputs []Argument
|
|
||||||
}
|
}
|
||||||
if err := json.Unmarshal(data, &fields); err != nil {
|
if err := json.Unmarshal(data, &fields); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
abi.Methods = make(map[string]Method)
|
abi.Methods = make(map[string]Method)
|
||||||
abi.Events = make(map[string]Event)
|
abi.Events = make(map[string]Event)
|
||||||
|
abi.Errors = make(map[string]Error)
|
||||||
for _, field := range fields {
|
for _, field := range fields {
|
||||||
switch field.Type {
|
switch field.Type {
|
||||||
case "constructor":
|
case "constructor":
|
||||||
abi.Constructor = Method{
|
abi.Constructor = NewMethod("", "", Constructor, field.StateMutability, field.Constant, field.Payable, field.Inputs, nil)
|
||||||
Inputs: field.Inputs,
|
case "function":
|
||||||
|
name := overloadedName(field.Name, func(s string) bool { _, ok := abi.Methods[s]; return ok })
|
||||||
|
abi.Methods[name] = NewMethod(name, field.Name, Function, field.StateMutability, field.Constant, field.Payable, field.Inputs, field.Outputs)
|
||||||
|
case "fallback":
|
||||||
|
// New introduced function type in v0.6.0, check more detail
|
||||||
|
// here https://solidity.readthedocs.io/en/v0.6.0/contracts.html#fallback-function
|
||||||
|
if abi.HasFallback() {
|
||||||
|
return errors.New("only single fallback is allowed")
|
||||||
}
|
}
|
||||||
// empty defaults to function according to the abi spec
|
abi.Fallback = NewMethod("", "", Fallback, field.StateMutability, field.Constant, field.Payable, nil, nil)
|
||||||
case "function", "":
|
case "receive":
|
||||||
name := field.Name
|
// New introduced function type in v0.6.0, check more detail
|
||||||
_, ok := abi.Methods[name]
|
// here https://solidity.readthedocs.io/en/v0.6.0/contracts.html#fallback-function
|
||||||
for idx := 0; ok; idx++ {
|
if abi.HasReceive() {
|
||||||
name = fmt.Sprintf("%s%d", field.Name, idx)
|
return errors.New("only single receive is allowed")
|
||||||
_, ok = abi.Methods[name]
|
|
||||||
}
|
}
|
||||||
abi.Methods[name] = Method{
|
if field.StateMutability != "payable" {
|
||||||
Name: name,
|
return errors.New("the statemutability of receive can only be payable")
|
||||||
RawName: field.Name,
|
|
||||||
Const: field.Constant,
|
|
||||||
Inputs: field.Inputs,
|
|
||||||
Outputs: field.Outputs,
|
|
||||||
}
|
}
|
||||||
|
abi.Receive = NewMethod("", "", Receive, field.StateMutability, field.Constant, field.Payable, nil, nil)
|
||||||
case "event":
|
case "event":
|
||||||
name := field.Name
|
name := overloadedName(field.Name, func(s string) bool { _, ok := abi.Events[s]; return ok })
|
||||||
_, ok := abi.Events[name]
|
abi.Events[name] = NewEvent(name, field.Name, field.Anonymous, field.Inputs)
|
||||||
for idx := 0; ok; idx++ {
|
case "error":
|
||||||
name = fmt.Sprintf("%s%d", field.Name, idx)
|
abi.Errors[field.Name] = NewError(field.Name, field.Inputs)
|
||||||
_, ok = abi.Events[name]
|
default:
|
||||||
}
|
return fmt.Errorf("abi: could not recognize type %v of field %v", field.Type, field.Name)
|
||||||
abi.Events[name] = Event{
|
|
||||||
Name: name,
|
|
||||||
RawName: field.Name,
|
|
||||||
Anonymous: field.Anonymous,
|
|
||||||
Inputs: field.Inputs,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// MethodById looks up a method by the 4-byte id
|
// MethodById looks up a method by the 4-byte id,
|
||||||
// returns nil if none found
|
// returns nil if none found.
|
||||||
func (abi *ABI) MethodById(sigdata []byte) (*Method, error) {
|
func (abi *ABI) MethodById(sigdata []byte) (*Method, error) {
|
||||||
if len(sigdata) < 4 {
|
if len(sigdata) < 4 {
|
||||||
return nil, fmt.Errorf("data too short (%d bytes) for abi method lookup", len(sigdata))
|
return nil, fmt.Errorf("data too short (%d bytes) for abi method lookup", len(sigdata))
|
||||||
}
|
}
|
||||||
for _, method := range abi.Methods {
|
for _, method := range abi.Methods {
|
||||||
if bytes.Equal(method.ID(), sigdata[:4]) {
|
if bytes.Equal(method.ID, sigdata[:4]) {
|
||||||
return &method, nil
|
return &method, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -178,9 +213,58 @@ func (abi *ABI) MethodById(sigdata []byte) (*Method, error) {
 // ABI and returns nil if none found.
 func (abi *ABI) EventByID(topic common.Hash) (*Event, error) {
 	for _, event := range abi.Events {
-		if bytes.Equal(event.ID().Bytes(), topic.Bytes()) {
+		if bytes.Equal(event.ID.Bytes(), topic.Bytes()) {
 			return &event, nil
 		}
 	}
 	return nil, fmt.Errorf("no event with id: %#x", topic.Hex())
 }

+// HasFallback returns an indicator whether a fallback function is included.
+func (abi *ABI) HasFallback() bool {
+	return abi.Fallback.Type == Fallback
+}
+
+// HasReceive returns an indicator whether a receive function is included.
+func (abi *ABI) HasReceive() bool {
+	return abi.Receive.Type == Receive
+}
+
+// revertSelector is a special function selector for revert reason unpacking.
+var revertSelector = crypto.Keccak256([]byte("Error(string)"))[:4]
+
+// UnpackRevert resolves the abi-encoded revert reason. According to the solidity
+// spec https://solidity.readthedocs.io/en/latest/control-structures.html#revert,
+// the provided revert reason is abi-encoded as if it were a call to a function
+// `Error(string)`. So it's a special tool for it.
+func UnpackRevert(data []byte) (string, error) {
+	if len(data) < 4 {
+		return "", errors.New("invalid data for unpacking")
+	}
+	if !bytes.Equal(data[:4], revertSelector) {
+		return "", errors.New("invalid data for unpacking")
+	}
+	typ, _ := NewType("string", "", nil)
+	unpacked, err := (Arguments{{Type: typ}}).Unpack(data[4:])
+	if err != nil {
+		return "", err
+	}
+	return unpacked[0].(string), nil
+}
+
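As a quick illustration of the new UnpackRevert helper added above, the sketch below decodes the canonical Error("revert reason") payload used later in TestUnpackRevert; the hex constant is taken from that test, while the package main wrapper and variable names are illustrative.

package main

import (
	"fmt"
	"log"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

func main() {
	// ABI encoding of Error("revert reason"): the 08c379a0 selector followed by
	// the offset, length and padded contents of the string.
	data := common.Hex2Bytes("08c379a00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000d72657665727420726561736f6e00000000000000000000000000000000000000")
	reason, err := abi.UnpackRevert(data)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(reason) // "revert reason"
}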
+// overloadedName returns the next available name for a given thing.
+// Needed since solidity allows for overloading.
+//
+// e.g. if the abi contains Methods send, send1
+// overloadedName would return send2 for input send.
+//
+// overloadedName works for methods, events and errors.
+func overloadedName(rawName string, isAvail func(string) bool) string {
+	name := rawName
+	ok := isAvail(name)
+	for idx := 0; ok; idx++ {
+		name = fmt.Sprintf("%s%d", rawName, idx)
+		ok = isAvail(name)
+	}
+	return name
+}
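The renaming scheme above can be hard to read from the loop alone, so here is a small standalone sketch of the same idea (a local copy of the logic, not an exported API), showing how repeated definitions of send become send, send0, send1, matching the duplicate-name tests further down.

package main

import "fmt"

// overloaded mirrors the renaming loop above: keep appending an index
// until the candidate name is no longer taken.
func overloaded(rawName string, taken map[string]bool) string {
	name := rawName
	for idx := 0; taken[name]; idx++ {
		name = fmt.Sprintf("%s%d", rawName, idx)
	}
	return name
}

func main() {
	methods := map[string]bool{}
	for i := 0; i < 3; i++ {
		name := overloaded("send", methods)
		methods[name] = true
		fmt.Println(name)
	}
	// Output:
	// send
	// send0
	// send1
}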
@@ -19,6 +19,7 @@ package abi
 import (
 	"bytes"
 	"encoding/hex"
+	"errors"
 	"fmt"
 	"math/big"
 	"reflect"
@ -26,57 +27,108 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
|
"github.com/ethereum/go-ethereum/common/math"
|
||||||
"github.com/ethereum/go-ethereum/crypto"
|
"github.com/ethereum/go-ethereum/crypto"
|
||||||
)
|
)
|
||||||
|
|
||||||
const jsondata = `
|
const jsondata = `
|
||||||
[
|
[
|
||||||
{ "type" : "function", "name" : "balance", "constant" : true },
|
{ "type" : "function", "name" : ""},
|
||||||
{ "type" : "function", "name" : "send", "constant" : false, "inputs" : [ { "name" : "amount", "type" : "uint256" } ] }
|
{ "type" : "function", "name" : "balance", "stateMutability" : "view" },
|
||||||
|
{ "type" : "function", "name" : "send", "inputs" : [ { "name" : "amount", "type" : "uint256" } ] },
|
||||||
|
{ "type" : "function", "name" : "test", "inputs" : [ { "name" : "number", "type" : "uint32" } ] },
|
||||||
|
{ "type" : "function", "name" : "string", "inputs" : [ { "name" : "inputs", "type" : "string" } ] },
|
||||||
|
{ "type" : "function", "name" : "bool", "inputs" : [ { "name" : "inputs", "type" : "bool" } ] },
|
||||||
|
{ "type" : "function", "name" : "address", "inputs" : [ { "name" : "inputs", "type" : "address" } ] },
|
||||||
|
{ "type" : "function", "name" : "uint64[2]", "inputs" : [ { "name" : "inputs", "type" : "uint64[2]" } ] },
|
||||||
|
{ "type" : "function", "name" : "uint64[]", "inputs" : [ { "name" : "inputs", "type" : "uint64[]" } ] },
|
||||||
|
{ "type" : "function", "name" : "int8", "inputs" : [ { "name" : "inputs", "type" : "int8" } ] },
|
||||||
|
{ "type" : "function", "name" : "bytes32", "inputs" : [ { "name" : "inputs", "type" : "bytes32" } ] },
|
||||||
|
{ "type" : "function", "name" : "foo", "inputs" : [ { "name" : "inputs", "type" : "uint32" } ] },
|
||||||
|
{ "type" : "function", "name" : "bar", "inputs" : [ { "name" : "inputs", "type" : "uint32" }, { "name" : "string", "type" : "uint16" } ] },
|
||||||
|
{ "type" : "function", "name" : "slice", "inputs" : [ { "name" : "inputs", "type" : "uint32[2]" } ] },
|
||||||
|
{ "type" : "function", "name" : "slice256", "inputs" : [ { "name" : "inputs", "type" : "uint256[2]" } ] },
|
||||||
|
{ "type" : "function", "name" : "sliceAddress", "inputs" : [ { "name" : "inputs", "type" : "address[]" } ] },
|
||||||
|
{ "type" : "function", "name" : "sliceMultiAddress", "inputs" : [ { "name" : "a", "type" : "address[]" }, { "name" : "b", "type" : "address[]" } ] },
|
||||||
|
{ "type" : "function", "name" : "nestedArray", "inputs" : [ { "name" : "a", "type" : "uint256[2][2]" }, { "name" : "b", "type" : "address[]" } ] },
|
||||||
|
{ "type" : "function", "name" : "nestedArray2", "inputs" : [ { "name" : "a", "type" : "uint8[][2]" } ] },
|
||||||
|
{ "type" : "function", "name" : "nestedSlice", "inputs" : [ { "name" : "a", "type" : "uint8[][]" } ] },
|
||||||
|
{ "type" : "function", "name" : "receive", "inputs" : [ { "name" : "memo", "type" : "bytes" }], "outputs" : [], "payable" : true, "stateMutability" : "payable" },
|
||||||
|
{ "type" : "function", "name" : "fixedArrStr", "stateMutability" : "view", "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr", "type" : "uint256[2]" } ] },
|
||||||
|
{ "type" : "function", "name" : "fixedArrBytes", "stateMutability" : "view", "inputs" : [ { "name" : "bytes", "type" : "bytes" }, { "name" : "fixedArr", "type" : "uint256[2]" } ] },
|
||||||
|
{ "type" : "function", "name" : "mixedArrStr", "stateMutability" : "view", "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr", "type" : "uint256[2]" }, { "name" : "dynArr", "type" : "uint256[]" } ] },
|
||||||
|
{ "type" : "function", "name" : "doubleFixedArrStr", "stateMutability" : "view", "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr1", "type" : "uint256[2]" }, { "name" : "fixedArr2", "type" : "uint256[3]" } ] },
|
||||||
|
{ "type" : "function", "name" : "multipleMixedArrStr", "stateMutability" : "view", "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr1", "type" : "uint256[2]" }, { "name" : "dynArr", "type" : "uint256[]" }, { "name" : "fixedArr2", "type" : "uint256[3]" } ] },
|
||||||
|
{ "type" : "function", "name" : "overloadedNames", "stateMutability" : "view", "inputs": [ { "components": [ { "internalType": "uint256", "name": "_f", "type": "uint256" }, { "internalType": "uint256", "name": "__f", "type": "uint256"}, { "internalType": "uint256", "name": "f", "type": "uint256"}],"internalType": "struct Overloader.F", "name": "f","type": "tuple"}]}
|
||||||
]`
|
]`
|
||||||
|
|
||||||
const jsondata2 = `
|
var (
|
||||||
[
|
Uint256, _ = NewType("uint256", "", nil)
|
||||||
{ "type" : "function", "name" : "balance", "constant" : true },
|
Uint32, _ = NewType("uint32", "", nil)
|
||||||
{ "type" : "function", "name" : "send", "constant" : false, "inputs" : [ { "name" : "amount", "type" : "uint256" } ] },
|
Uint16, _ = NewType("uint16", "", nil)
|
||||||
{ "type" : "function", "name" : "test", "constant" : false, "inputs" : [ { "name" : "number", "type" : "uint32" } ] },
|
String, _ = NewType("string", "", nil)
|
||||||
{ "type" : "function", "name" : "string", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "string" } ] },
|
Bool, _ = NewType("bool", "", nil)
|
||||||
{ "type" : "function", "name" : "bool", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "bool" } ] },
|
Bytes, _ = NewType("bytes", "", nil)
|
||||||
{ "type" : "function", "name" : "address", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "address" } ] },
|
Bytes32, _ = NewType("bytes32", "", nil)
|
||||||
{ "type" : "function", "name" : "uint64[2]", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "uint64[2]" } ] },
|
Address, _ = NewType("address", "", nil)
|
||||||
{ "type" : "function", "name" : "uint64[]", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "uint64[]" } ] },
|
Uint64Arr, _ = NewType("uint64[]", "", nil)
|
||||||
{ "type" : "function", "name" : "foo", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "uint32" } ] },
|
AddressArr, _ = NewType("address[]", "", nil)
|
||||||
{ "type" : "function", "name" : "bar", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "uint32" }, { "name" : "string", "type" : "uint16" } ] },
|
Int8, _ = NewType("int8", "", nil)
|
||||||
{ "type" : "function", "name" : "slice", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "uint32[2]" } ] },
|
// Special types for testing
|
||||||
{ "type" : "function", "name" : "slice256", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "uint256[2]" } ] },
|
Uint32Arr2, _ = NewType("uint32[2]", "", nil)
|
||||||
{ "type" : "function", "name" : "sliceAddress", "constant" : false, "inputs" : [ { "name" : "inputs", "type" : "address[]" } ] },
|
Uint64Arr2, _ = NewType("uint64[2]", "", nil)
|
||||||
{ "type" : "function", "name" : "sliceMultiAddress", "constant" : false, "inputs" : [ { "name" : "a", "type" : "address[]" }, { "name" : "b", "type" : "address[]" } ] },
|
Uint256Arr, _ = NewType("uint256[]", "", nil)
|
||||||
{ "type" : "function", "name" : "nestedArray", "constant" : false, "inputs" : [ { "name" : "a", "type" : "uint256[2][2]" }, { "name" : "b", "type" : "address[]" } ] },
|
Uint256Arr2, _ = NewType("uint256[2]", "", nil)
|
||||||
{ "type" : "function", "name" : "nestedArray2", "constant" : false, "inputs" : [ { "name" : "a", "type" : "uint8[][2]" } ] },
|
Uint256Arr3, _ = NewType("uint256[3]", "", nil)
|
||||||
{ "type" : "function", "name" : "nestedSlice", "constant" : false, "inputs" : [ { "name" : "a", "type" : "uint8[][]" } ] }
|
Uint256ArrNested, _ = NewType("uint256[2][2]", "", nil)
|
||||||
]`
|
Uint8ArrNested, _ = NewType("uint8[][2]", "", nil)
|
||||||
|
Uint8SliceNested, _ = NewType("uint8[][]", "", nil)
|
||||||
|
TupleF, _ = NewType("tuple", "struct Overloader.F", []ArgumentMarshaling{
|
||||||
|
{Name: "_f", Type: "uint256"},
|
||||||
|
{Name: "__f", Type: "uint256"},
|
||||||
|
{Name: "f", Type: "uint256"}})
|
||||||
|
)
|
||||||
|
|
||||||
|
var methods = map[string]Method{
|
||||||
|
"": NewMethod("", "", Function, "", false, false, nil, nil),
|
||||||
|
"balance": NewMethod("balance", "balance", Function, "view", false, false, nil, nil),
|
||||||
|
"send": NewMethod("send", "send", Function, "", false, false, []Argument{{"amount", Uint256, false}}, nil),
|
||||||
|
"test": NewMethod("test", "test", Function, "", false, false, []Argument{{"number", Uint32, false}}, nil),
|
||||||
|
"string": NewMethod("string", "string", Function, "", false, false, []Argument{{"inputs", String, false}}, nil),
|
||||||
|
"bool": NewMethod("bool", "bool", Function, "", false, false, []Argument{{"inputs", Bool, false}}, nil),
|
||||||
|
"address": NewMethod("address", "address", Function, "", false, false, []Argument{{"inputs", Address, false}}, nil),
|
||||||
|
"uint64[]": NewMethod("uint64[]", "uint64[]", Function, "", false, false, []Argument{{"inputs", Uint64Arr, false}}, nil),
|
||||||
|
"uint64[2]": NewMethod("uint64[2]", "uint64[2]", Function, "", false, false, []Argument{{"inputs", Uint64Arr2, false}}, nil),
|
||||||
|
"int8": NewMethod("int8", "int8", Function, "", false, false, []Argument{{"inputs", Int8, false}}, nil),
|
||||||
|
"bytes32": NewMethod("bytes32", "bytes32", Function, "", false, false, []Argument{{"inputs", Bytes32, false}}, nil),
|
||||||
|
"foo": NewMethod("foo", "foo", Function, "", false, false, []Argument{{"inputs", Uint32, false}}, nil),
|
||||||
|
"bar": NewMethod("bar", "bar", Function, "", false, false, []Argument{{"inputs", Uint32, false}, {"string", Uint16, false}}, nil),
|
||||||
|
"slice": NewMethod("slice", "slice", Function, "", false, false, []Argument{{"inputs", Uint32Arr2, false}}, nil),
|
||||||
|
"slice256": NewMethod("slice256", "slice256", Function, "", false, false, []Argument{{"inputs", Uint256Arr2, false}}, nil),
|
||||||
|
"sliceAddress": NewMethod("sliceAddress", "sliceAddress", Function, "", false, false, []Argument{{"inputs", AddressArr, false}}, nil),
|
||||||
|
"sliceMultiAddress": NewMethod("sliceMultiAddress", "sliceMultiAddress", Function, "", false, false, []Argument{{"a", AddressArr, false}, {"b", AddressArr, false}}, nil),
|
||||||
|
"nestedArray": NewMethod("nestedArray", "nestedArray", Function, "", false, false, []Argument{{"a", Uint256ArrNested, false}, {"b", AddressArr, false}}, nil),
|
||||||
|
"nestedArray2": NewMethod("nestedArray2", "nestedArray2", Function, "", false, false, []Argument{{"a", Uint8ArrNested, false}}, nil),
|
||||||
|
"nestedSlice": NewMethod("nestedSlice", "nestedSlice", Function, "", false, false, []Argument{{"a", Uint8SliceNested, false}}, nil),
|
||||||
|
"receive": NewMethod("receive", "receive", Function, "payable", false, true, []Argument{{"memo", Bytes, false}}, []Argument{}),
|
||||||
|
"fixedArrStr": NewMethod("fixedArrStr", "fixedArrStr", Function, "view", false, false, []Argument{{"str", String, false}, {"fixedArr", Uint256Arr2, false}}, nil),
|
||||||
|
"fixedArrBytes": NewMethod("fixedArrBytes", "fixedArrBytes", Function, "view", false, false, []Argument{{"bytes", Bytes, false}, {"fixedArr", Uint256Arr2, false}}, nil),
|
||||||
|
"mixedArrStr": NewMethod("mixedArrStr", "mixedArrStr", Function, "view", false, false, []Argument{{"str", String, false}, {"fixedArr", Uint256Arr2, false}, {"dynArr", Uint256Arr, false}}, nil),
|
||||||
|
"doubleFixedArrStr": NewMethod("doubleFixedArrStr", "doubleFixedArrStr", Function, "view", false, false, []Argument{{"str", String, false}, {"fixedArr1", Uint256Arr2, false}, {"fixedArr2", Uint256Arr3, false}}, nil),
|
||||||
|
"multipleMixedArrStr": NewMethod("multipleMixedArrStr", "multipleMixedArrStr", Function, "view", false, false, []Argument{{"str", String, false}, {"fixedArr1", Uint256Arr2, false}, {"dynArr", Uint256Arr, false}, {"fixedArr2", Uint256Arr3, false}}, nil),
|
||||||
|
"overloadedNames": NewMethod("overloadedNames", "overloadedNames", Function, "view", false, false, []Argument{{"f", TupleF, false}}, nil),
|
||||||
|
}
|
||||||
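The methods table above is built entirely with the new NewMethod constructor (name, raw name, function type, state mutability, const and payable flags, inputs, outputs), which precomputes Sig and ID. A minimal sketch of calling it directly, with an assumed uint256 argument type, could look like this; it mirrors the "send" entry above and is illustrative only.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	uint256, err := abi.NewType("uint256", "", nil)
	if err != nil {
		panic(err)
	}
	// Roughly equivalent to the "send" entry in the methods table above.
	m := abi.NewMethod("send", "send", abi.Function, "", false, false,
		[]abi.Argument{{Name: "amount", Type: uint256, Indexed: false}}, nil)
	fmt.Println(m.Sig)      // send(uint256)
	fmt.Printf("%x\n", m.ID) // 4-byte selector, precomputed by NewMethod
}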
|
|
||||||
 func TestReader(t *testing.T) {
-	Uint256, _ := NewType("uint256", "", nil)
+	abi := ABI{
-	exp := ABI{
+		Methods: methods,
-		Methods: map[string]Method{
-			"balance": {
-				"balance", "balance", true, nil, nil,
-			},
-			"send": {
-				"send", "send", false, []Argument{
-					{"amount", Uint256, false},
-				}, nil,
-			},
-		},
 	}

-	abi, err := JSON(strings.NewReader(jsondata))
+	exp, err := JSON(strings.NewReader(jsondata))
 	if err != nil {
-		t.Error(err)
+		t.Fatal(err)
 	}

-	// deep equal fails for some reason
 	for name, expM := range exp.Methods {
 		gotM, exist := abi.Methods[name]
 		if !exist {
@@ -98,8 +150,55 @@ func TestReader(t *testing.T) {
 	}
 }

+func TestInvalidABI(t *testing.T) {
+	json := `[{ "type" : "function", "name" : "", "constant" : fals }]`
+	_, err := JSON(strings.NewReader(json))
+	if err == nil {
+		t.Fatal("invalid json should produce error")
+	}
+	json2 := `[{ "type" : "function", "name" : "send", "constant" : false, "inputs" : [ { "name" : "amount", "typ" : "uint256" } ] }]`
+	_, err = JSON(strings.NewReader(json2))
+	if err == nil {
+		t.Fatal("invalid json should produce error")
+	}
+}
+
+// TestConstructor tests a constructor function.
+// The test is based on the following contract:
+// contract TestConstructor {
+// constructor(uint256 a, uint256 b) public{}
+// }
+func TestConstructor(t *testing.T) {
+	json := `[{ "inputs": [{"internalType": "uint256","name": "a","type": "uint256" },{ "internalType": "uint256","name": "b","type": "uint256"}],"stateMutability": "nonpayable","type": "constructor"}]`
+	method := NewMethod("", "", Constructor, "nonpayable", false, false, []Argument{{"a", Uint256, false}, {"b", Uint256, false}}, nil)
+	// Test from JSON
+	abi, err := JSON(strings.NewReader(json))
+	if err != nil {
+		t.Fatal(err)
+	}
+	if !reflect.DeepEqual(abi.Constructor, method) {
+		t.Error("Missing expected constructor")
+	}
+	// Test pack/unpack
+	packed, err := abi.Pack("", big.NewInt(1), big.NewInt(2))
+	if err != nil {
+		t.Error(err)
+	}
+	unpacked, err := abi.Constructor.Inputs.Unpack(packed)
+	if err != nil {
+		t.Error(err)
+	}
+
+	if !reflect.DeepEqual(unpacked[0], big.NewInt(1)) {
+		t.Error("Unable to pack/unpack from constructor")
+	}
+	if !reflect.DeepEqual(unpacked[1], big.NewInt(2)) {
+		t.Error("Unable to pack/unpack from constructor")
+	}
+}

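Beyond the constructor case in the test above, packing a regular call works the same way: Pack prepends the 4-byte selector and ABI-encodes the arguments behind it. A short sketch, reusing a trimmed-down send(uint256) definition as a stand-in for the test ABI:

package main

import (
	"fmt"
	"log"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	const def = `[{ "type" : "function", "name" : "send", "inputs" : [ { "name" : "amount", "type" : "uint256" } ] }]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		log.Fatal(err)
	}
	packed, err := parsed.Pack("send", big.NewInt(1))
	if err != nil {
		log.Fatal(err)
	}
	// 4-byte selector of send(uint256) followed by one 32-byte word.
	fmt.Printf("selector %x, total length %d\n", packed[:4], len(packed)) // length 36
}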
func TestTestNumbers(t *testing.T) {
|
func TestTestNumbers(t *testing.T) {
|
||||||
abi, err := JSON(strings.NewReader(jsondata2))
|
abi, err := JSON(strings.NewReader(jsondata))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
@ -135,60 +234,22 @@ func TestTestNumbers(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTestString(t *testing.T) {
|
|
||||||
abi, err := JSON(strings.NewReader(jsondata2))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := abi.Pack("string", "hello world"); err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTestBool(t *testing.T) {
|
|
||||||
abi, err := JSON(strings.NewReader(jsondata2))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := abi.Pack("bool", true); err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTestSlice(t *testing.T) {
|
|
||||||
abi, err := JSON(strings.NewReader(jsondata2))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
slice := make([]uint64, 2)
|
|
||||||
if _, err := abi.Pack("uint64[2]", slice); err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
if _, err := abi.Pack("uint64[]", slice); err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMethodSignature(t *testing.T) {
|
func TestMethodSignature(t *testing.T) {
|
||||||
String, _ := NewType("string", "", nil)
|
m := NewMethod("foo", "foo", Function, "", false, false, []Argument{{"bar", String, false}, {"baz", String, false}}, nil)
|
||||||
m := Method{"foo", "foo", false, []Argument{{"bar", String, false}, {"baz", String, false}}, nil}
|
|
||||||
exp := "foo(string,string)"
|
exp := "foo(string,string)"
|
||||||
if m.Sig() != exp {
|
if m.Sig != exp {
|
||||||
t.Error("signature mismatch", exp, "!=", m.Sig())
|
t.Error("signature mismatch", exp, "!=", m.Sig)
|
||||||
}
|
}
|
||||||
|
|
||||||
idexp := crypto.Keccak256([]byte(exp))[:4]
|
idexp := crypto.Keccak256([]byte(exp))[:4]
|
||||||
if !bytes.Equal(m.ID(), idexp) {
|
if !bytes.Equal(m.ID, idexp) {
|
||||||
t.Errorf("expected ids to match %x != %x", m.ID(), idexp)
|
t.Errorf("expected ids to match %x != %x", m.ID, idexp)
|
||||||
}
|
}
|
||||||
|
|
||||||
uintt, _ := NewType("uint256", "", nil)
|
m = NewMethod("foo", "foo", Function, "", false, false, []Argument{{"bar", Uint256, false}}, nil)
|
||||||
m = Method{"foo", "foo", false, []Argument{{"bar", uintt, false}}, nil}
|
|
||||||
exp = "foo(uint256)"
|
exp = "foo(uint256)"
|
||||||
if m.Sig() != exp {
|
if m.Sig != exp {
|
||||||
t.Error("signature mismatch", exp, "!=", m.Sig())
|
t.Error("signature mismatch", exp, "!=", m.Sig)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Method with tuple arguments
|
// Method with tuple arguments
|
||||||
@ -204,10 +265,10 @@ func TestMethodSignature(t *testing.T) {
|
|||||||
{Name: "y", Type: "int256"},
|
{Name: "y", Type: "int256"},
|
||||||
}},
|
}},
|
||||||
})
|
})
|
||||||
m = Method{"foo", "foo", false, []Argument{{"s", s, false}, {"bar", String, false}}, nil}
|
m = NewMethod("foo", "foo", Function, "", false, false, []Argument{{"s", s, false}, {"bar", String, false}}, nil)
|
||||||
exp = "foo((int256,int256[],(int256,int256)[],(int256,int256)[2]),string)"
|
exp = "foo((int256,int256[],(int256,int256)[],(int256,int256)[2]),string)"
|
||||||
if m.Sig() != exp {
|
if m.Sig != exp {
|
||||||
t.Error("signature mismatch", exp, "!=", m.Sig())
|
t.Error("signature mismatch", exp, "!=", m.Sig)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -219,12 +280,12 @@ func TestOverloadedMethodSignature(t *testing.T) {
 	}
 	check := func(name string, expect string, method bool) {
 		if method {
-			if abi.Methods[name].Sig() != expect {
+			if abi.Methods[name].Sig != expect {
-				t.Fatalf("The signature of overloaded method mismatch, want %s, have %s", expect, abi.Methods[name].Sig())
+				t.Fatalf("The signature of overloaded method mismatch, want %s, have %s", expect, abi.Methods[name].Sig)
 			}
 		} else {
-			if abi.Events[name].Sig() != expect {
+			if abi.Events[name].Sig != expect {
-				t.Fatalf("The signature of overloaded event mismatch, want %s, have %s", expect, abi.Events[name].Sig())
+				t.Fatalf("The signature of overloaded event mismatch, want %s, have %s", expect, abi.Events[name].Sig)
 			}
 		}
 	}
@@ -234,8 +295,22 @@ func TestOverloadedMethodSignature(t *testing.T) {
 	check("bar0", "bar(uint256,uint256)", false)
 }

+func TestCustomErrors(t *testing.T) {
+	json := `[{ "inputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ],"name": "MyError", "type": "error"} ]`
+	abi, err := JSON(strings.NewReader(json))
+	if err != nil {
+		t.Fatal(err)
+	}
+	check := func(name string, expect string) {
+		if abi.Errors[name].Sig != expect {
+			t.Fatalf("The signature of overloaded method mismatch, want %s, have %s", expect, abi.Methods[name].Sig)
+		}
+	}
+	check("MyError", "MyError(uint256)")
+}
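Solidity custom errors, as exercised by TestCustomErrors above, land in the new Errors map with a precomputed signature. The sketch below parses the same single-error ABI and reads the entry back; variable names are illustrative.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	const def = `[{ "inputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ],"name": "MyError", "type": "error"} ]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		log.Fatal(err)
	}
	myErr, ok := parsed.Errors["MyError"]
	if !ok {
		log.Fatal("error definition not found")
	}
	fmt.Println(myErr.Sig) // MyError(uint256)
}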
|
|
||||||
func TestMultiPack(t *testing.T) {
|
func TestMultiPack(t *testing.T) {
|
||||||
abi, err := JSON(strings.NewReader(jsondata2))
|
abi, err := JSON(strings.NewReader(jsondata))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
@ -400,15 +475,7 @@ func TestInputVariableInputLength(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestInputFixedArrayAndVariableInputLength(t *testing.T) {
|
func TestInputFixedArrayAndVariableInputLength(t *testing.T) {
|
||||||
const definition = `[
|
abi, err := JSON(strings.NewReader(jsondata))
|
||||||
{ "type" : "function", "name" : "fixedArrStr", "constant" : true, "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr", "type" : "uint256[2]" } ] },
|
|
||||||
{ "type" : "function", "name" : "fixedArrBytes", "constant" : true, "inputs" : [ { "name" : "str", "type" : "bytes" }, { "name" : "fixedArr", "type" : "uint256[2]" } ] },
|
|
||||||
{ "type" : "function", "name" : "mixedArrStr", "constant" : true, "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr", "type": "uint256[2]" }, { "name" : "dynArr", "type": "uint256[]" } ] },
|
|
||||||
{ "type" : "function", "name" : "doubleFixedArrStr", "constant" : true, "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr1", "type": "uint256[2]" }, { "name" : "fixedArr2", "type": "uint256[3]" } ] },
|
|
||||||
{ "type" : "function", "name" : "multipleMixedArrStr", "constant" : true, "inputs" : [ { "name" : "str", "type" : "string" }, { "name" : "fixedArr1", "type": "uint256[2]" }, { "name" : "dynArr", "type" : "uint256[]" }, { "name" : "fixedArr2", "type" : "uint256[3]" } ] }
|
|
||||||
]`
|
|
||||||
|
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -555,7 +622,7 @@ func TestInputFixedArrayAndVariableInputLength(t *testing.T) {
|
|||||||
strvalue = common.RightPadBytes([]byte(strin), 32)
|
strvalue = common.RightPadBytes([]byte(strin), 32)
|
||||||
fixedarrin1value1 = common.LeftPadBytes(fixedarrin1[0].Bytes(), 32)
|
fixedarrin1value1 = common.LeftPadBytes(fixedarrin1[0].Bytes(), 32)
|
||||||
fixedarrin1value2 = common.LeftPadBytes(fixedarrin1[1].Bytes(), 32)
|
fixedarrin1value2 = common.LeftPadBytes(fixedarrin1[1].Bytes(), 32)
|
||||||
dynarroffset = U256(big.NewInt(int64(256 + ((len(strin)/32)+1)*32)))
|
dynarroffset = math.U256Bytes(big.NewInt(int64(256 + ((len(strin)/32)+1)*32)))
|
||||||
dynarrlength = make([]byte, 32)
|
dynarrlength = make([]byte, 32)
|
||||||
dynarrlength[31] = byte(len(dynarrin))
|
dynarrlength[31] = byte(len(dynarrin))
|
||||||
dynarrinvalue1 = common.LeftPadBytes(dynarrin[0].Bytes(), 32)
|
dynarrinvalue1 = common.LeftPadBytes(dynarrin[0].Bytes(), 32)
|
||||||
@ -582,7 +649,7 @@ func TestInputFixedArrayAndVariableInputLength(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestDefaultFunctionParsing(t *testing.T) {
|
func TestDefaultFunctionParsing(t *testing.T) {
|
||||||
const definition = `[{ "name" : "balance" }]`
|
const definition = `[{ "name" : "balance", "type" : "function" }]`
|
||||||
|
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -602,8 +669,6 @@ func TestBareEvents(t *testing.T) {
|
|||||||
{ "type" : "event", "name" : "tuple", "inputs" : [{ "indexed":false, "name":"t", "type":"tuple", "components":[{"name":"a", "type":"uint256"}] }, { "indexed":true, "name":"arg1", "type":"address" }] }
|
{ "type" : "event", "name" : "tuple", "inputs" : [{ "indexed":false, "name":"t", "type":"tuple", "components":[{"name":"a", "type":"uint256"}] }, { "indexed":true, "name":"arg1", "type":"address" }] }
|
||||||
]`
|
]`
|
||||||
|
|
||||||
arg0, _ := NewType("uint256", "", nil)
|
|
||||||
arg1, _ := NewType("address", "", nil)
|
|
||||||
tuple, _ := NewType("tuple", "", []ArgumentMarshaling{{Name: "a", Type: "uint256"}})
|
tuple, _ := NewType("tuple", "", []ArgumentMarshaling{{Name: "a", Type: "uint256"}})
|
||||||
|
|
||||||
expectedEvents := map[string]struct {
|
expectedEvents := map[string]struct {
|
||||||
@ -613,12 +678,12 @@ func TestBareEvents(t *testing.T) {
|
|||||||
"balance": {false, nil},
|
"balance": {false, nil},
|
||||||
"anon": {true, nil},
|
"anon": {true, nil},
|
||||||
"args": {false, []Argument{
|
"args": {false, []Argument{
|
||||||
{Name: "arg0", Type: arg0, Indexed: false},
|
{Name: "arg0", Type: Uint256, Indexed: false},
|
||||||
{Name: "arg1", Type: arg1, Indexed: true},
|
{Name: "arg1", Type: Address, Indexed: true},
|
||||||
}},
|
}},
|
||||||
"tuple": {false, []Argument{
|
"tuple": {false, []Argument{
|
||||||
{Name: "t", Type: tuple, Indexed: false},
|
{Name: "t", Type: tuple, Indexed: false},
|
||||||
{Name: "arg1", Type: arg1, Indexed: true},
|
{Name: "arg1", Type: Address, Indexed: true},
|
||||||
}},
|
}},
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -692,7 +757,7 @@ func TestUnpackEvent(t *testing.T) {
|
|||||||
}
|
}
|
||||||
var ev ReceivedEvent
|
var ev ReceivedEvent
|
||||||
|
|
||||||
err = abi.Unpack(&ev, "received", data)
|
err = abi.UnpackIntoInterface(&ev, "received", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -701,7 +766,7 @@ func TestUnpackEvent(t *testing.T) {
|
|||||||
Sender common.Address
|
Sender common.Address
|
||||||
}
|
}
|
||||||
var receivedAddrEv ReceivedAddrEvent
|
var receivedAddrEv ReceivedAddrEvent
|
||||||
err = abi.Unpack(&receivedAddrEv, "receivedAddr", data)
|
err = abi.UnpackIntoInterface(&receivedAddrEv, "receivedAddr", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -891,45 +956,25 @@ func TestUnpackIntoMapNamingConflict(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestABI_MethodById(t *testing.T) {
|
func TestABI_MethodById(t *testing.T) {
|
||||||
const abiJSON = `[
|
abi, err := JSON(strings.NewReader(jsondata))
|
||||||
{"type":"function","name":"receive","constant":false,"inputs":[{"name":"memo","type":"bytes"}],"outputs":[],"payable":true,"stateMutability":"payable"},
|
|
||||||
{"type":"event","name":"received","anonymous":false,"inputs":[{"indexed":false,"name":"sender","type":"address"},{"indexed":false,"name":"amount","type":"uint256"},{"indexed":false,"name":"memo","type":"bytes"}]},
|
|
||||||
{"type":"function","name":"fixedArrStr","constant":true,"inputs":[{"name":"str","type":"string"},{"name":"fixedArr","type":"uint256[2]"}]},
|
|
||||||
{"type":"function","name":"fixedArrBytes","constant":true,"inputs":[{"name":"str","type":"bytes"},{"name":"fixedArr","type":"uint256[2]"}]},
|
|
||||||
{"type":"function","name":"mixedArrStr","constant":true,"inputs":[{"name":"str","type":"string"},{"name":"fixedArr","type":"uint256[2]"},{"name":"dynArr","type":"uint256[]"}]},
|
|
||||||
{"type":"function","name":"doubleFixedArrStr","constant":true,"inputs":[{"name":"str","type":"string"},{"name":"fixedArr1","type":"uint256[2]"},{"name":"fixedArr2","type":"uint256[3]"}]},
|
|
||||||
{"type":"function","name":"multipleMixedArrStr","constant":true,"inputs":[{"name":"str","type":"string"},{"name":"fixedArr1","type":"uint256[2]"},{"name":"dynArr","type":"uint256[]"},{"name":"fixedArr2","type":"uint256[3]"}]},
|
|
||||||
{"type":"function","name":"balance","constant":true},
|
|
||||||
{"type":"function","name":"send","constant":false,"inputs":[{"name":"amount","type":"uint256"}]},
|
|
||||||
{"type":"function","name":"test","constant":false,"inputs":[{"name":"number","type":"uint32"}]},
|
|
||||||
{"type":"function","name":"string","constant":false,"inputs":[{"name":"inputs","type":"string"}]},
|
|
||||||
{"type":"function","name":"bool","constant":false,"inputs":[{"name":"inputs","type":"bool"}]},
|
|
||||||
{"type":"function","name":"address","constant":false,"inputs":[{"name":"inputs","type":"address"}]},
|
|
||||||
{"type":"function","name":"uint64[2]","constant":false,"inputs":[{"name":"inputs","type":"uint64[2]"}]},
|
|
||||||
{"type":"function","name":"uint64[]","constant":false,"inputs":[{"name":"inputs","type":"uint64[]"}]},
|
|
||||||
{"type":"function","name":"foo","constant":false,"inputs":[{"name":"inputs","type":"uint32"}]},
|
|
||||||
{"type":"function","name":"bar","constant":false,"inputs":[{"name":"inputs","type":"uint32"},{"name":"string","type":"uint16"}]},
|
|
||||||
{"type":"function","name":"_slice","constant":false,"inputs":[{"name":"inputs","type":"uint32[2]"}]},
|
|
||||||
{"type":"function","name":"__slice256","constant":false,"inputs":[{"name":"inputs","type":"uint256[2]"}]},
|
|
||||||
{"type":"function","name":"sliceAddress","constant":false,"inputs":[{"name":"inputs","type":"address[]"}]},
|
|
||||||
{"type":"function","name":"sliceMultiAddress","constant":false,"inputs":[{"name":"a","type":"address[]"},{"name":"b","type":"address[]"}]}
|
|
||||||
]
|
|
||||||
`
|
|
||||||
abi, err := JSON(strings.NewReader(abiJSON))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
for name, m := range abi.Methods {
|
for name, m := range abi.Methods {
|
||||||
a := fmt.Sprintf("%v", m)
|
a := fmt.Sprintf("%v", m)
|
||||||
m2, err := abi.MethodById(m.ID())
|
m2, err := abi.MethodById(m.ID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("Failed to look up ABI method: %v", err)
|
t.Fatalf("Failed to look up ABI method: %v", err)
|
||||||
}
|
}
|
||||||
b := fmt.Sprintf("%v", m2)
|
b := fmt.Sprintf("%v", m2)
|
||||||
if a != b {
|
if a != b {
|
||||||
t.Errorf("Method %v (id %v) not 'findable' by id in ABI", name, common.ToHex(m.ID()))
|
t.Errorf("Method %v (id %x) not 'findable' by id in ABI", name, m.ID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// test unsuccessful lookups
|
||||||
|
if _, err = abi.MethodById(crypto.Keccak256()); err == nil {
|
||||||
|
t.Error("Expected error: no method with this id")
|
||||||
|
}
|
||||||
// Also test empty
|
// Also test empty
|
||||||
if _, err := abi.MethodById([]byte{0x00}); err == nil {
|
if _, err := abi.MethodById([]byte{0x00}); err == nil {
|
||||||
t.Errorf("Expected error, too short to decode data")
|
t.Errorf("Expected error, too short to decode data")
|
||||||
@ -995,8 +1040,8 @@ func TestABI_EventById(t *testing.T) {
|
|||||||
t.Errorf("We should find a event for topic %s, test #%d", topicID.Hex(), testnum)
|
t.Errorf("We should find a event for topic %s, test #%d", topicID.Hex(), testnum)
|
||||||
}
|
}
|
||||||
|
|
||||||
if event.ID() != topicID {
|
if event.ID != topicID {
|
||||||
t.Errorf("Event id %s does not match topic %s, test #%d", event.ID().Hex(), topicID.Hex(), testnum)
|
t.Errorf("Event id %s does not match topic %s, test #%d", event.ID.Hex(), topicID.Hex(), testnum)
|
||||||
}
|
}
|
||||||
|
|
||||||
unknowntopicID := crypto.Keccak256Hash([]byte("unknownEvent"))
|
unknowntopicID := crypto.Keccak256Hash([]byte("unknownEvent"))
|
||||||
@ -1010,26 +1055,6 @@ func TestABI_EventById(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDuplicateMethodNames(t *testing.T) {
|
|
||||||
abiJSON := `[{"constant":false,"inputs":[{"name":"to","type":"address"},{"name":"value","type":"uint256"}],"name":"transfer","outputs":[{"name":"ok","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"to","type":"address"},{"name":"value","type":"uint256"},{"name":"data","type":"bytes"}],"name":"transfer","outputs":[{"name":"ok","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"to","type":"address"},{"name":"value","type":"uint256"},{"name":"data","type":"bytes"},{"name":"customFallback","type":"string"}],"name":"transfer","outputs":[{"name":"ok","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"}]`
|
|
||||||
contractAbi, err := JSON(strings.NewReader(abiJSON))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
if _, ok := contractAbi.Methods["transfer"]; !ok {
|
|
||||||
t.Fatalf("Could not find original method")
|
|
||||||
}
|
|
||||||
if _, ok := contractAbi.Methods["transfer0"]; !ok {
|
|
||||||
t.Fatalf("Could not find duplicate method")
|
|
||||||
}
|
|
||||||
if _, ok := contractAbi.Methods["transfer1"]; !ok {
|
|
||||||
t.Fatalf("Could not find duplicate method")
|
|
||||||
}
|
|
||||||
if _, ok := contractAbi.Methods["transfer2"]; ok {
|
|
||||||
t.Fatalf("Should not have found extra method")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestDoubleDuplicateMethodNames checks that if transfer0 already exists, there won't be a name
|
// TestDoubleDuplicateMethodNames checks that if transfer0 already exists, there won't be a name
|
||||||
// conflict and that the second transfer method will be renamed transfer1.
|
// conflict and that the second transfer method will be renamed transfer1.
|
||||||
func TestDoubleDuplicateMethodNames(t *testing.T) {
|
func TestDoubleDuplicateMethodNames(t *testing.T) {
|
||||||
@ -1051,3 +1076,87 @@ func TestDoubleDuplicateMethodNames(t *testing.T) {
|
|||||||
t.Fatalf("Should not have found extra method")
|
t.Fatalf("Should not have found extra method")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TestDoubleDuplicateEventNames checks that if send0 already exists, there won't be a name
|
||||||
|
// conflict and that the second send event will be renamed send1.
|
||||||
|
// The test runs the abi of the following contract.
|
||||||
|
// contract DuplicateEvent {
|
||||||
|
// event send(uint256 a);
|
||||||
|
// event send0();
|
||||||
|
// event send();
|
||||||
|
// }
|
||||||
|
func TestDoubleDuplicateEventNames(t *testing.T) {
|
||||||
|
abiJSON := `[{"anonymous": false,"inputs": [{"indexed": false,"internalType": "uint256","name": "a","type": "uint256"}],"name": "send","type": "event"},{"anonymous": false,"inputs": [],"name": "send0","type": "event"},{ "anonymous": false, "inputs": [],"name": "send","type": "event"}]`
|
||||||
|
contractAbi, err := JSON(strings.NewReader(abiJSON))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if _, ok := contractAbi.Events["send"]; !ok {
|
||||||
|
t.Fatalf("Could not find original event")
|
||||||
|
}
|
||||||
|
if _, ok := contractAbi.Events["send0"]; !ok {
|
||||||
|
t.Fatalf("Could not find duplicate event")
|
||||||
|
}
|
||||||
|
if _, ok := contractAbi.Events["send1"]; !ok {
|
||||||
|
t.Fatalf("Could not find duplicate event")
|
||||||
|
}
|
||||||
|
if _, ok := contractAbi.Events["send2"]; ok {
|
||||||
|
t.Fatalf("Should not have found extra event")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestUnnamedEventParam checks that an event with unnamed parameters is
|
||||||
|
// correctly handled.
|
||||||
|
// The test runs the abi of the following contract.
|
||||||
|
// contract TestEvent {
|
||||||
|
// event send(uint256, uint256);
|
||||||
|
// }
|
||||||
|
func TestUnnamedEventParam(t *testing.T) {
|
||||||
|
abiJSON := `[{ "anonymous": false, "inputs": [{ "indexed": false,"internalType": "uint256", "name": "","type": "uint256"},{"indexed": false,"internalType": "uint256","name": "","type": "uint256"}],"name": "send","type": "event"}]`
|
||||||
|
contractAbi, err := JSON(strings.NewReader(abiJSON))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
event, ok := contractAbi.Events["send"]
|
||||||
|
if !ok {
|
||||||
|
t.Fatalf("Could not find event")
|
||||||
|
}
|
||||||
|
if event.Inputs[0].Name != "arg0" {
|
||||||
|
t.Fatalf("Could not find input")
|
||||||
|
}
|
||||||
|
if event.Inputs[1].Name != "arg1" {
|
||||||
|
t.Fatalf("Could not find input")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUnpackRevert(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
var cases = []struct {
|
||||||
|
input string
|
||||||
|
expect string
|
||||||
|
expectErr error
|
||||||
|
}{
|
||||||
|
{"", "", errors.New("invalid data for unpacking")},
|
||||||
|
{"08c379a1", "", errors.New("invalid data for unpacking")},
|
||||||
|
{"08c379a00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000d72657665727420726561736f6e00000000000000000000000000000000000000", "revert reason", nil},
|
||||||
|
}
|
||||||
|
for index, c := range cases {
|
||||||
|
t.Run(fmt.Sprintf("case %d", index), func(t *testing.T) {
|
||||||
|
got, err := UnpackRevert(common.Hex2Bytes(c.input))
|
||||||
|
if c.expectErr != nil {
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("Expected non-nil error")
|
||||||
|
}
|
||||||
|
if err.Error() != c.expectErr.Error() {
|
||||||
|
t.Fatalf("Expected error mismatch, want %v, got %v", c.expectErr, err)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if c.expect != got {
|
||||||
|
t.Fatalf("Output mismatch, want %v, got %v", c.expect, got)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -41,7 +41,7 @@ type ArgumentMarshaling struct {
|
|||||||
Indexed bool
|
Indexed bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// UnmarshalJSON implements json.Unmarshaler interface
|
// UnmarshalJSON implements json.Unmarshaler interface.
|
||||||
func (argument *Argument) UnmarshalJSON(data []byte) error {
|
func (argument *Argument) UnmarshalJSON(data []byte) error {
|
||||||
var arg ArgumentMarshaling
|
var arg ArgumentMarshaling
|
||||||
err := json.Unmarshal(data, &arg)
|
err := json.Unmarshal(data, &arg)
|
||||||
@ -59,19 +59,7 @@ func (argument *Argument) UnmarshalJSON(data []byte) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// LengthNonIndexed returns the number of arguments when not counting 'indexed' ones. Only events
|
// NonIndexed returns the arguments with indexed arguments filtered out.
|
||||||
// can ever have 'indexed' arguments, it should always be false on arguments for method input/output
|
|
||||||
func (arguments Arguments) LengthNonIndexed() int {
|
|
||||||
out := 0
|
|
||||||
for _, arg := range arguments {
|
|
||||||
if !arg.Indexed {
|
|
||||||
out++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
|
|
||||||
// NonIndexed returns the arguments with indexed arguments filtered out
|
|
||||||
func (arguments Arguments) NonIndexed() Arguments {
|
func (arguments Arguments) NonIndexed() Arguments {
|
||||||
var ret []Argument
|
var ret []Argument
|
||||||
for _, arg := range arguments {
|
for _, arg := range arguments {
|
||||||
@ -82,216 +70,127 @@ func (arguments Arguments) NonIndexed() Arguments {
|
|||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
// isTuple returns true for non-atomic constructs, like (uint,uint) or uint[]
|
// isTuple returns true for non-atomic constructs, like (uint,uint) or uint[].
|
||||||
func (arguments Arguments) isTuple() bool {
|
func (arguments Arguments) isTuple() bool {
|
||||||
return len(arguments) > 1
|
return len(arguments) > 1
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unpack performs the operation hexdata -> Go format
|
// Unpack performs the operation hexdata -> Go format.
|
||||||
func (arguments Arguments) Unpack(v interface{}, data []byte) error {
|
func (arguments Arguments) Unpack(data []byte) ([]interface{}, error) {
|
||||||
if len(data) == 0 {
|
if len(data) == 0 {
|
||||||
if len(arguments) != 0 {
|
if len(arguments) != 0 {
|
||||||
return fmt.Errorf("abi: attempting to unmarshall an empty string while arguments are expected")
|
return nil, fmt.Errorf("abi: attempting to unmarshall an empty string while arguments are expected")
|
||||||
} else {
|
|
||||||
return nil // Nothing to unmarshal, return
|
|
||||||
}
|
}
|
||||||
|
// Nothing to unmarshal, return default variables
|
||||||
|
nonIndexedArgs := arguments.NonIndexed()
|
||||||
|
defaultVars := make([]interface{}, len(nonIndexedArgs))
|
||||||
|
for index, arg := range nonIndexedArgs {
|
||||||
|
defaultVars[index] = reflect.New(arg.Type.GetType())
|
||||||
|
}
|
||||||
|
return defaultVars, nil
|
||||||
}
|
}
|
||||||
// make sure the passed value is arguments pointer
|
return arguments.UnpackValues(data)
|
||||||
if reflect.Ptr != reflect.ValueOf(v).Kind() {
|
|
||||||
return fmt.Errorf("abi: Unpack(non-pointer %T)", v)
|
|
||||||
}
|
|
||||||
marshalledValues, err := arguments.UnpackValues(data)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if arguments.isTuple() {
|
|
||||||
return arguments.unpackTuple(v, marshalledValues)
|
|
||||||
}
|
|
||||||
return arguments.unpackAtomic(v, marshalledValues[0])
|
|
||||||
}
|
}
|
||||||
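The Unpack change above is the core API shift in this diff: decoded hexdata now comes back as a []interface{} instead of being written into a caller-supplied pointer, with UnpackIntoInterface taking over the old struct-filling role (as the event tests above show). A minimal sketch of the new flow, assuming a send(uint256) method like the one in the test ABI:

package main

import (
	"fmt"
	"log"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	const def = `[{ "type" : "function", "name" : "send", "inputs" : [ { "name" : "amount", "type" : "uint256" } ] }]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		log.Fatal(err)
	}
	packed, err := parsed.Pack("send", big.NewInt(42))
	if err != nil {
		log.Fatal(err)
	}
	// New style: Unpack returns the decoded values directly.
	values, err := parsed.Methods["send"].Inputs.Unpack(packed[4:])
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(values[0].(*big.Int)) // 42
}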
|
|
||||||
// UnpackIntoMap performs the operation hexdata -> mapping of argument name to argument value
|
// UnpackIntoMap performs the operation hexdata -> mapping of argument name to argument value.
|
||||||
func (arguments Arguments) UnpackIntoMap(v map[string]interface{}, data []byte) error {
|
func (arguments Arguments) UnpackIntoMap(v map[string]interface{}, data []byte) error {
|
||||||
if len(data) == 0 {
|
|
||||||
if len(arguments) != 0 {
|
|
||||||
return fmt.Errorf("abi: attempting to unmarshall an empty string while arguments are expected")
|
|
||||||
} else {
|
|
||||||
return nil // Nothing to unmarshal, return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
marshalledValues, err := arguments.UnpackValues(data)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return arguments.unpackIntoMap(v, marshalledValues)
|
|
||||||
}
|
|
||||||
|
|
||||||
// unpack sets the unmarshalled value to go format.
|
|
||||||
// Note the dst here must be settable.
|
|
||||||
func unpack(t *Type, dst interface{}, src interface{}) error {
|
|
||||||
var (
|
|
||||||
dstVal = reflect.ValueOf(dst).Elem()
|
|
||||||
srcVal = reflect.ValueOf(src)
|
|
||||||
)
|
|
||||||
tuple, typ := false, t
|
|
||||||
for {
|
|
||||||
if typ.T == SliceTy || typ.T == ArrayTy {
|
|
||||||
typ = typ.Elem
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
tuple = typ.T == TupleTy
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if !tuple {
|
|
||||||
return set(dstVal, srcVal)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Dereferences interface or pointer wrapper
|
|
||||||
dstVal = indirectInterfaceOrPtr(dstVal)
|
|
||||||
|
|
||||||
switch t.T {
|
|
||||||
case TupleTy:
|
|
||||||
if dstVal.Kind() != reflect.Struct {
|
|
||||||
return fmt.Errorf("abi: invalid dst value for unpack, want struct, got %s", dstVal.Kind())
|
|
||||||
}
|
|
||||||
fieldmap, err := mapArgNamesToStructFields(t.TupleRawNames, dstVal)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for i, elem := range t.TupleElems {
|
|
||||||
fname := fieldmap[t.TupleRawNames[i]]
|
|
||||||
field := dstVal.FieldByName(fname)
|
|
||||||
if !field.IsValid() {
|
|
||||||
return fmt.Errorf("abi: field %s can't found in the given value", t.TupleRawNames[i])
|
|
||||||
}
|
|
||||||
if err := unpack(elem, field.Addr().Interface(), srcVal.Field(i).Interface()); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
case SliceTy:
|
|
||||||
if dstVal.Kind() != reflect.Slice {
|
|
||||||
return fmt.Errorf("abi: invalid dst value for unpack, want slice, got %s", dstVal.Kind())
|
|
||||||
}
|
|
||||||
slice := reflect.MakeSlice(dstVal.Type(), srcVal.Len(), srcVal.Len())
|
|
||||||
for i := 0; i < slice.Len(); i++ {
|
|
||||||
if err := unpack(t.Elem, slice.Index(i).Addr().Interface(), srcVal.Index(i).Interface()); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
dstVal.Set(slice)
|
|
||||||
case ArrayTy:
|
|
||||||
if dstVal.Kind() != reflect.Array {
|
|
||||||
return fmt.Errorf("abi: invalid dst value for unpack, want array, got %s", dstVal.Kind())
|
|
||||||
}
|
|
||||||
array := reflect.New(dstVal.Type()).Elem()
|
|
||||||
for i := 0; i < array.Len(); i++ {
|
|
||||||
if err := unpack(t.Elem, array.Index(i).Addr().Interface(), srcVal.Index(i).Interface()); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
dstVal.Set(array)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// unpackIntoMap unpacks marshalledValues into the provided map[string]interface{}
|
|
||||||
func (arguments Arguments) unpackIntoMap(v map[string]interface{}, marshalledValues []interface{}) error {
|
|
||||||
// Make sure map is not nil
|
// Make sure map is not nil
|
||||||
if v == nil {
|
if v == nil {
|
||||||
return fmt.Errorf("abi: cannot unpack into a nil map")
|
return fmt.Errorf("abi: cannot unpack into a nil map")
|
||||||
}
|
}
|
||||||
|
if len(data) == 0 {
|
||||||
|
if len(arguments) != 0 {
|
||||||
|
return fmt.Errorf("abi: attempting to unmarshall an empty string while arguments are expected")
|
||||||
|
}
|
||||||
|
return nil // Nothing to unmarshal, return
|
||||||
|
}
|
||||||
|
marshalledValues, err := arguments.UnpackValues(data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
for i, arg := range arguments.NonIndexed() {
|
for i, arg := range arguments.NonIndexed() {
|
||||||
v[arg.Name] = marshalledValues[i]
|
v[arg.Name] = marshalledValues[i]
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// unpackAtomic unpacks ( hexdata -> go ) a single value
|
// Copy performs the operation go format -> provided struct.
|
||||||
func (arguments Arguments) unpackAtomic(v interface{}, marshalledValues interface{}) error {
|
func (arguments Arguments) Copy(v interface{}, values []interface{}) error {
|
||||||
if arguments.LengthNonIndexed() == 0 {
|
// make sure the passed value is arguments pointer
|
||||||
return nil
|
if reflect.Ptr != reflect.ValueOf(v).Kind() {
|
||||||
|
return fmt.Errorf("abi: Unpack(non-pointer %T)", v)
|
||||||
}
|
}
|
||||||
argument := arguments.NonIndexed()[0]
|
if len(values) == 0 {
|
||||||
elem := reflect.ValueOf(v).Elem()
|
if len(arguments) != 0 {
|
||||||
|
return fmt.Errorf("abi: attempting to copy no values while %d arguments are expected", len(arguments))
|
||||||
if elem.Kind() == reflect.Struct && argument.Type.T != TupleTy {
|
|
||||||
fieldmap, err := mapArgNamesToStructFields([]string{argument.Name}, elem)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
field := elem.FieldByName(fieldmap[argument.Name])
|
return nil // Nothing to copy, return
|
||||||
if !field.IsValid() {
|
|
||||||
return fmt.Errorf("abi: field %s can't be found in the given value", argument.Name)
|
|
||||||
}
|
|
||||||
return unpack(&argument.Type, field.Addr().Interface(), marshalledValues)
|
|
||||||
}
|
}
|
||||||
return unpack(&argument.Type, elem.Addr().Interface(), marshalledValues)
|
if arguments.isTuple() {
|
||||||
|
return arguments.copyTuple(v, values)
|
||||||
|
}
|
||||||
|
return arguments.copyAtomic(v, values[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
// unpackTuple unpacks ( hexdata -> go ) a batch of values.
|
// unpackAtomic unpacks ( hexdata -> go ) a single value
|
||||||
func (arguments Arguments) unpackTuple(v interface{}, marshalledValues []interface{}) error {
|
func (arguments Arguments) copyAtomic(v interface{}, marshalledValues interface{}) error {
|
||||||
var (
|
dst := reflect.ValueOf(v).Elem()
|
||||||
value = reflect.ValueOf(v).Elem()
|
src := reflect.ValueOf(marshalledValues)
|
||||||
typ = value.Type()
|
|
||||||
kind = value.Kind()
|
|
||||||
)
|
|
||||||
if err := requireUnpackKind(value, typ, kind, arguments); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the interface is a struct, get of abi->struct_field mapping
|
if dst.Kind() == reflect.Struct {
|
||||||
var abi2struct map[string]string
|
return set(dst.Field(0), src)
|
||||||
if kind == reflect.Struct {
|
}
|
||||||
var (
|
return set(dst, src)
|
||||||
argNames []string
|
}
|
||||||
err error
|
|
||||||
)
|
// copyTuple copies a batch of values from marshalledValues to v.
|
||||||
for _, arg := range arguments.NonIndexed() {
|
func (arguments Arguments) copyTuple(v interface{}, marshalledValues []interface{}) error {
|
||||||
argNames = append(argNames, arg.Name)
|
value := reflect.ValueOf(v).Elem()
|
||||||
|
nonIndexedArgs := arguments.NonIndexed()
|
||||||
|
|
||||||
|
switch value.Kind() {
|
||||||
|
case reflect.Struct:
|
||||||
|
argNames := make([]string, len(nonIndexedArgs))
|
||||||
|
for i, arg := range nonIndexedArgs {
|
||||||
|
argNames[i] = arg.Name
|
||||||
}
|
}
|
||||||
abi2struct, err = mapArgNamesToStructFields(argNames, value)
|
var err error
|
||||||
|
abi2struct, err := mapArgNamesToStructFields(argNames, value)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
for i, arg := range nonIndexedArgs {
|
||||||
for i, arg := range arguments.NonIndexed() {
|
|
||||||
switch kind {
|
|
||||||
case reflect.Struct:
|
|
||||||
field := value.FieldByName(abi2struct[arg.Name])
|
field := value.FieldByName(abi2struct[arg.Name])
|
||||||
if !field.IsValid() {
|
if !field.IsValid() {
|
||||||
return fmt.Errorf("abi: field %s can't be found in the given value", arg.Name)
|
return fmt.Errorf("abi: field %s can't be found in the given value", arg.Name)
|
||||||
}
|
}
|
||||||
if err := unpack(&arg.Type, field.Addr().Interface(), marshalledValues[i]); err != nil {
|
if err := set(field, reflect.ValueOf(marshalledValues[i])); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
case reflect.Slice, reflect.Array:
|
|
||||||
if value.Len() < i {
|
|
||||||
return fmt.Errorf("abi: insufficient number of arguments for unpack, want %d, got %d", len(arguments), value.Len())
|
|
||||||
}
|
|
||||||
v := value.Index(i)
|
|
||||||
if err := requireAssignable(v, reflect.ValueOf(marshalledValues[i])); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := unpack(&arg.Type, v.Addr().Interface(), marshalledValues[i]); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("abi:[2] cannot unmarshal tuple in to %v", typ)
|
|
||||||
}
|
}
|
||||||
|
case reflect.Slice, reflect.Array:
|
||||||
|
if value.Len() < len(marshalledValues) {
|
||||||
|
return fmt.Errorf("abi: insufficient number of arguments for unpack, want %d, got %d", len(arguments), value.Len())
|
||||||
|
}
|
||||||
|
for i := range nonIndexedArgs {
|
||||||
|
if err := set(value.Index(i), reflect.ValueOf(marshalledValues[i])); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("abi:[2] cannot unmarshal tuple in to %v", value.Type())
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// UnpackValues can be used to unpack ABI-encoded hexdata according to the ABI-specification,
|
// UnpackValues can be used to unpack ABI-encoded hexdata according to the ABI-specification,
|
||||||
// without supplying a struct to unpack into. Instead, this method returns a list containing the
|
// without supplying a struct to unpack into. Instead, this method returns a list containing the
|
||||||
// values. An atomic argument will be a list with one element.
|
// values. An atomic argument will be a list with one element.
|
||||||
func (arguments Arguments) UnpackValues(data []byte) ([]interface{}, error) {
|
func (arguments Arguments) UnpackValues(data []byte) ([]interface{}, error) {
|
||||||
retval := make([]interface{}, 0, arguments.LengthNonIndexed())
|
nonIndexedArgs := arguments.NonIndexed()
|
||||||
|
retval := make([]interface{}, 0, len(nonIndexedArgs))
|
||||||
virtualArgs := 0
|
virtualArgs := 0
|
||||||
for index, arg := range arguments.NonIndexed() {
|
for index, arg := range nonIndexedArgs {
|
||||||
marshalledValue, err := toGoType((index+virtualArgs)*32, arg.Type, data)
|
marshalledValue, err := toGoType((index+virtualArgs)*32, arg.Type, data)
|
||||||
if arg.Type.T == ArrayTy && !isDynamicType(arg.Type) {
|
if arg.Type.T == ArrayTy && !isDynamicType(arg.Type) {
|
||||||
// If we have a static array, like [3]uint256, these are coded as
|
// If we have a static array, like [3]uint256, these are coded as
|
||||||
@ -318,18 +217,18 @@ func (arguments Arguments) UnpackValues(data []byte) ([]interface{}, error) {
|
|||||||
return retval, nil
|
return retval, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// PackValues performs the operation Go format -> Hexdata
|
// PackValues performs the operation Go format -> Hexdata.
|
||||||
// It is the semantic opposite of UnpackValues
|
// It is the semantic opposite of UnpackValues.
|
||||||
func (arguments Arguments) PackValues(args []interface{}) ([]byte, error) {
|
func (arguments Arguments) PackValues(args []interface{}) ([]byte, error) {
|
||||||
return arguments.Pack(args...)
|
return arguments.Pack(args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Pack performs the operation Go format -> Hexdata
|
// Pack performs the operation Go format -> Hexdata.
|
||||||
func (arguments Arguments) Pack(args ...interface{}) ([]byte, error) {
|
func (arguments Arguments) Pack(args ...interface{}) ([]byte, error) {
|
||||||
// Make sure arguments match up and pack them
|
// Make sure arguments match up and pack them
|
||||||
abiArgs := arguments
|
abiArgs := arguments
|
||||||
if len(args) != len(abiArgs) {
|
if len(args) != len(abiArgs) {
|
||||||
return nil, fmt.Errorf("argument count mismatch: %d for %d", len(args), len(abiArgs))
|
return nil, fmt.Errorf("argument count mismatch: got %d for %d", len(args), len(abiArgs))
|
||||||
}
|
}
|
||||||
// variable input is the output appended at the end of packed
|
// variable input is the output appended at the end of packed
|
||||||
// output. This is used for strings and bytes types input.
|
// output. This is used for strings and bytes types input.
|
||||||
|
@ -17,10 +17,12 @@
|
|||||||
package bind
|
package bind
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"crypto/ecdsa"
|
"crypto/ecdsa"
|
||||||
"errors"
|
"errors"
|
||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
|
"math/big"
|
||||||
|
|
||||||
"github.com/ethereum/go-ethereum/accounts"
|
"github.com/ethereum/go-ethereum/accounts"
|
||||||
"github.com/ethereum/go-ethereum/accounts/external"
|
"github.com/ethereum/go-ethereum/accounts/external"
|
||||||
@ -28,11 +30,21 @@ import (
|
|||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
"github.com/ethereum/go-ethereum/crypto"
|
"github.com/ethereum/go-ethereum/crypto"
|
||||||
|
"github.com/ethereum/go-ethereum/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// ErrNoChainID is returned whenever the user failed to specify a chain id.
|
||||||
|
var ErrNoChainID = errors.New("no chain id specified")
|
||||||
|
|
||||||
|
// ErrNotAuthorized is returned when an account is not properly unlocked.
|
||||||
|
var ErrNotAuthorized = errors.New("not authorized to sign this account")
|
||||||
|
|
||||||
// NewTransactor is a utility method to easily create a transaction signer from
|
// NewTransactor is a utility method to easily create a transaction signer from
|
||||||
// an encrypted json key stream and the associated passphrase.
|
// an encrypted json key stream and the associated passphrase.
|
||||||
|
//
|
||||||
|
// Deprecated: Use NewTransactorWithChainID instead.
|
||||||
func NewTransactor(keyin io.Reader, passphrase string) (*TransactOpts, error) {
|
func NewTransactor(keyin io.Reader, passphrase string) (*TransactOpts, error) {
|
||||||
|
log.Warn("WARNING: NewTransactor has been deprecated in favour of NewTransactorWithChainID")
|
||||||
json, err := ioutil.ReadAll(keyin)
|
json, err := ioutil.ReadAll(keyin)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -45,13 +57,17 @@ func NewTransactor(keyin io.Reader, passphrase string) (*TransactOpts, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// NewKeyStoreTransactor is a utility method to easily create a transaction signer from
|
// NewKeyStoreTransactor is a utility method to easily create a transaction signer from
|
||||||
// an decrypted key from a keystore
|
// an decrypted key from a keystore.
|
||||||
|
//
|
||||||
|
// Deprecated: Use NewKeyStoreTransactorWithChainID instead.
|
||||||
func NewKeyStoreTransactor(keystore *keystore.KeyStore, account accounts.Account) (*TransactOpts, error) {
|
func NewKeyStoreTransactor(keystore *keystore.KeyStore, account accounts.Account) (*TransactOpts, error) {
|
||||||
|
log.Warn("WARNING: NewKeyStoreTransactor has been deprecated in favour of NewTransactorWithChainID")
|
||||||
|
signer := types.HomesteadSigner{}
|
||||||
return &TransactOpts{
|
return &TransactOpts{
|
||||||
From: account.Address,
|
From: account.Address,
|
||||||
Signer: func(signer types.Signer, address common.Address, tx *types.Transaction) (*types.Transaction, error) {
|
Signer: func(address common.Address, tx *types.Transaction) (*types.Transaction, error) {
|
||||||
if address != account.Address {
|
if address != account.Address {
|
||||||
return nil, errors.New("not authorized to sign this account")
|
return nil, ErrNotAuthorized
|
||||||
}
|
}
|
||||||
signature, err := keystore.SignHash(account, signer.Hash(tx).Bytes())
|
signature, err := keystore.SignHash(account, signer.Hash(tx).Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -59,18 +75,23 @@ func NewKeyStoreTransactor(keystore *keystore.KeyStore, account accounts.Account
|
|||||||
}
|
}
|
||||||
return tx.WithSignature(signer, signature)
|
return tx.WithSignature(signer, signature)
|
||||||
},
|
},
|
||||||
|
Context: context.Background(),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewKeyedTransactor is a utility method to easily create a transaction signer
|
// NewKeyedTransactor is a utility method to easily create a transaction signer
|
||||||
// from a single private key.
|
// from a single private key.
|
||||||
|
//
|
||||||
|
// Deprecated: Use NewKeyedTransactorWithChainID instead.
|
||||||
func NewKeyedTransactor(key *ecdsa.PrivateKey) *TransactOpts {
|
func NewKeyedTransactor(key *ecdsa.PrivateKey) *TransactOpts {
|
||||||
|
log.Warn("WARNING: NewKeyedTransactor has been deprecated in favour of NewKeyedTransactorWithChainID")
|
||||||
keyAddr := crypto.PubkeyToAddress(key.PublicKey)
|
keyAddr := crypto.PubkeyToAddress(key.PublicKey)
|
||||||
|
signer := types.HomesteadSigner{}
|
||||||
return &TransactOpts{
|
return &TransactOpts{
|
||||||
From: keyAddr,
|
From: keyAddr,
|
||||||
Signer: func(signer types.Signer, address common.Address, tx *types.Transaction) (*types.Transaction, error) {
|
Signer: func(address common.Address, tx *types.Transaction) (*types.Transaction, error) {
|
||||||
if address != keyAddr {
|
if address != keyAddr {
|
||||||
return nil, errors.New("not authorized to sign this account")
|
return nil, ErrNotAuthorized
|
||||||
}
|
}
|
||||||
signature, err := crypto.Sign(signer.Hash(tx).Bytes(), key)
|
signature, err := crypto.Sign(signer.Hash(tx).Bytes(), key)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -78,19 +99,82 @@ func NewKeyedTransactor(key *ecdsa.PrivateKey) *TransactOpts {
|
|||||||
}
|
}
|
||||||
return tx.WithSignature(signer, signature)
|
return tx.WithSignature(signer, signature)
|
||||||
},
|
},
|
||||||
|
Context: context.Background(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NewTransactorWithChainID is a utility method to easily create a transaction signer from
|
||||||
|
// an encrypted json key stream and the associated passphrase.
|
||||||
|
func NewTransactorWithChainID(keyin io.Reader, passphrase string, chainID *big.Int) (*TransactOpts, error) {
|
||||||
|
json, err := ioutil.ReadAll(keyin)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
key, err := keystore.DecryptKey(json, passphrase)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return NewKeyedTransactorWithChainID(key.PrivateKey, chainID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewKeyStoreTransactorWithChainID is a utility method to easily create a transaction signer from
|
||||||
|
// an decrypted key from a keystore.
|
||||||
|
func NewKeyStoreTransactorWithChainID(keystore *keystore.KeyStore, account accounts.Account, chainID *big.Int) (*TransactOpts, error) {
|
||||||
|
if chainID == nil {
|
||||||
|
return nil, ErrNoChainID
|
||||||
|
}
|
||||||
|
signer := types.LatestSignerForChainID(chainID)
|
||||||
|
return &TransactOpts{
|
||||||
|
From: account.Address,
|
||||||
|
Signer: func(address common.Address, tx *types.Transaction) (*types.Transaction, error) {
|
||||||
|
if address != account.Address {
|
||||||
|
return nil, ErrNotAuthorized
|
||||||
|
}
|
||||||
|
signature, err := keystore.SignHash(account, signer.Hash(tx).Bytes())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return tx.WithSignature(signer, signature)
|
||||||
|
},
|
||||||
|
Context: context.Background(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewKeyedTransactorWithChainID is a utility method to easily create a transaction signer
|
||||||
|
// from a single private key.
|
||||||
|
func NewKeyedTransactorWithChainID(key *ecdsa.PrivateKey, chainID *big.Int) (*TransactOpts, error) {
|
||||||
|
keyAddr := crypto.PubkeyToAddress(key.PublicKey)
|
||||||
|
if chainID == nil {
|
||||||
|
return nil, ErrNoChainID
|
||||||
|
}
|
||||||
|
signer := types.LatestSignerForChainID(chainID)
|
||||||
|
return &TransactOpts{
|
||||||
|
From: keyAddr,
|
||||||
|
Signer: func(address common.Address, tx *types.Transaction) (*types.Transaction, error) {
|
||||||
|
if address != keyAddr {
|
||||||
|
return nil, ErrNotAuthorized
|
||||||
|
}
|
||||||
|
signature, err := crypto.Sign(signer.Hash(tx).Bytes(), key)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return tx.WithSignature(signer, signature)
|
||||||
|
},
|
||||||
|
Context: context.Background(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
// NewClefTransactor is a utility method to easily create a transaction signer
|
// NewClefTransactor is a utility method to easily create a transaction signer
|
||||||
// with a clef backend.
|
// with a clef backend.
|
||||||
func NewClefTransactor(clef *external.ExternalSigner, account accounts.Account) *TransactOpts {
|
func NewClefTransactor(clef *external.ExternalSigner, account accounts.Account) *TransactOpts {
|
||||||
return &TransactOpts{
|
return &TransactOpts{
|
||||||
From: account.Address,
|
From: account.Address,
|
||||||
Signer: func(signer types.Signer, address common.Address, transaction *types.Transaction) (*types.Transaction, error) {
|
Signer: func(address common.Address, transaction *types.Transaction) (*types.Transaction, error) {
|
||||||
if address != account.Address {
|
if address != account.Address {
|
||||||
return nil, errors.New("not authorized to sign this account")
|
return nil, ErrNotAuthorized
|
||||||
}
|
}
|
||||||
return clef.SignTx(account, transaction, nil) // Clef enforces its own chain id
|
return clef.SignTx(account, transaction, nil) // Clef enforces its own chain id
|
||||||
},
|
},
|
||||||
|
Context: context.Background(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -32,22 +32,23 @@ var (
|
|||||||
// have any code associated with it (i.e. suicided).
|
// have any code associated with it (i.e. suicided).
|
||||||
ErrNoCode = errors.New("no contract code at given address")
|
ErrNoCode = errors.New("no contract code at given address")
|
||||||
|
|
||||||
// This error is raised when attempting to perform a pending state action
|
// ErrNoPendingState is raised when attempting to perform a pending state action
|
||||||
// on a backend that doesn't implement PendingContractCaller.
|
// on a backend that doesn't implement PendingContractCaller.
|
||||||
ErrNoPendingState = errors.New("backend does not support pending state")
|
ErrNoPendingState = errors.New("backend does not support pending state")
|
||||||
|
|
||||||
// This error is returned by WaitDeployed if contract creation leaves an
|
// ErrNoCodeAfterDeploy is returned by WaitDeployed if contract creation leaves
|
||||||
// empty contract behind.
|
// an empty contract behind.
|
||||||
ErrNoCodeAfterDeploy = errors.New("no contract code after deployment")
|
ErrNoCodeAfterDeploy = errors.New("no contract code after deployment")
|
||||||
)
|
)
|
||||||
|
|
||||||
// ContractCaller defines the methods needed to allow operating with contract on a read
|
// ContractCaller defines the methods needed to allow operating with a contract on a read
|
||||||
// only basis.
|
// only basis.
|
||||||
type ContractCaller interface {
|
type ContractCaller interface {
|
||||||
// CodeAt returns the code of the given account. This is needed to differentiate
|
// CodeAt returns the code of the given account. This is needed to differentiate
|
||||||
// between contract internal errors and the local chain being out of sync.
|
// between contract internal errors and the local chain being out of sync.
|
||||||
CodeAt(ctx context.Context, contract common.Address, blockNumber *big.Int) ([]byte, error)
|
CodeAt(ctx context.Context, contract common.Address, blockNumber *big.Int) ([]byte, error)
|
||||||
// ContractCall executes an Ethereum contract call with the specified data as the
|
|
||||||
|
// CallContract executes an Ethereum contract call with the specified data as the
|
||||||
// input.
|
// input.
|
||||||
CallContract(ctx context.Context, call ethereum.CallMsg, blockNumber *big.Int) ([]byte, error)
|
CallContract(ctx context.Context, call ethereum.CallMsg, blockNumber *big.Int) ([]byte, error)
|
||||||
}
|
}
|
||||||
@ -58,28 +59,41 @@ type ContractCaller interface {
|
|||||||
type PendingContractCaller interface {
|
type PendingContractCaller interface {
|
||||||
// PendingCodeAt returns the code of the given account in the pending state.
|
// PendingCodeAt returns the code of the given account in the pending state.
|
||||||
PendingCodeAt(ctx context.Context, contract common.Address) ([]byte, error)
|
PendingCodeAt(ctx context.Context, contract common.Address) ([]byte, error)
|
||||||
|
|
||||||
// PendingCallContract executes an Ethereum contract call against the pending state.
|
// PendingCallContract executes an Ethereum contract call against the pending state.
|
||||||
PendingCallContract(ctx context.Context, call ethereum.CallMsg) ([]byte, error)
|
PendingCallContract(ctx context.Context, call ethereum.CallMsg) ([]byte, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ContractTransactor defines the methods needed to allow operating with contract
|
// ContractTransactor defines the methods needed to allow operating with a contract
|
||||||
// on a write only basis. Beside the transacting method, the remainder are helpers
|
// on a write only basis. Besides the transacting method, the remainder are helpers
|
||||||
// used when the user does not provide some needed values, but rather leaves it up
|
// used when the user does not provide some needed values, but rather leaves it up
|
||||||
// to the transactor to decide.
|
// to the transactor to decide.
|
||||||
type ContractTransactor interface {
|
type ContractTransactor interface {
|
||||||
|
// HeaderByNumber returns a block header from the current canonical chain. If
|
||||||
|
// number is nil, the latest known header is returned.
|
||||||
|
HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error)
|
||||||
|
|
||||||
// PendingCodeAt returns the code of the given account in the pending state.
|
// PendingCodeAt returns the code of the given account in the pending state.
|
||||||
PendingCodeAt(ctx context.Context, account common.Address) ([]byte, error)
|
PendingCodeAt(ctx context.Context, account common.Address) ([]byte, error)
|
||||||
|
|
||||||
// PendingNonceAt retrieves the current pending nonce associated with an account.
|
// PendingNonceAt retrieves the current pending nonce associated with an account.
|
||||||
PendingNonceAt(ctx context.Context, account common.Address) (uint64, error)
|
PendingNonceAt(ctx context.Context, account common.Address) (uint64, error)
|
||||||
|
|
||||||
// SuggestGasPrice retrieves the currently suggested gas price to allow a timely
|
// SuggestGasPrice retrieves the currently suggested gas price to allow a timely
|
||||||
// execution of a transaction.
|
// execution of a transaction.
|
||||||
SuggestGasPrice(ctx context.Context) (*big.Int, error)
|
SuggestGasPrice(ctx context.Context) (*big.Int, error)
|
||||||
|
|
||||||
|
// SuggestGasTipCap retrieves the currently suggested 1559 priority fee to allow
|
||||||
|
// a timely execution of a transaction.
|
||||||
|
SuggestGasTipCap(ctx context.Context) (*big.Int, error)
|
||||||
|
|
||||||
// EstimateGas tries to estimate the gas needed to execute a specific
|
// EstimateGas tries to estimate the gas needed to execute a specific
|
||||||
// transaction based on the current pending state of the backend blockchain.
|
// transaction based on the current pending state of the backend blockchain.
|
||||||
// There is no guarantee that this is the true gas limit requirement as other
|
// There is no guarantee that this is the true gas limit requirement as other
|
||||||
// transactions may be added or removed by miners, but it should provide a basis
|
// transactions may be added or removed by miners, but it should provide a basis
|
||||||
// for setting a reasonable default.
|
// for setting a reasonable default.
|
||||||
EstimateGas(ctx context.Context, call ethereum.CallMsg) (gas uint64, err error)
|
EstimateGas(ctx context.Context, call ethereum.CallMsg) (gas uint64, err error)
|
||||||
|
|
||||||
// SendTransaction injects the transaction into the pending pool for execution.
|
// SendTransaction injects the transaction into the pending pool for execution.
|
||||||
SendTransaction(ctx context.Context, tx *types.Transaction) error
|
SendTransaction(ctx context.Context, tx *types.Transaction) error
|
||||||
}
|
}
|
||||||
|
@ -25,8 +25,10 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/ethereum/go-ethereum"
|
"github.com/ethereum/go-ethereum"
|
||||||
|
"github.com/ethereum/go-ethereum/accounts/abi"
|
||||||
"github.com/ethereum/go-ethereum/accounts/abi/bind"
|
"github.com/ethereum/go-ethereum/accounts/abi/bind"
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
|
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||||
"github.com/ethereum/go-ethereum/common/math"
|
"github.com/ethereum/go-ethereum/common/math"
|
||||||
"github.com/ethereum/go-ethereum/consensus/ethash"
|
"github.com/ethereum/go-ethereum/consensus/ethash"
|
||||||
"github.com/ethereum/go-ethereum/core"
|
"github.com/ethereum/go-ethereum/core"
|
||||||
@ -38,27 +40,32 @@ import (
|
|||||||
"github.com/ethereum/go-ethereum/eth/filters"
|
"github.com/ethereum/go-ethereum/eth/filters"
|
||||||
"github.com/ethereum/go-ethereum/ethdb"
|
"github.com/ethereum/go-ethereum/ethdb"
|
||||||
"github.com/ethereum/go-ethereum/event"
|
"github.com/ethereum/go-ethereum/event"
|
||||||
|
"github.com/ethereum/go-ethereum/log"
|
||||||
"github.com/ethereum/go-ethereum/params"
|
"github.com/ethereum/go-ethereum/params"
|
||||||
"github.com/ethereum/go-ethereum/rpc"
|
"github.com/ethereum/go-ethereum/rpc"
|
||||||
)
|
)
|
||||||
|
|
||||||
// This nil assignment ensures compile time that SimulatedBackend implements bind.ContractBackend.
|
// This nil assignment ensures at compile time that SimulatedBackend implements bind.ContractBackend.
|
||||||
var _ bind.ContractBackend = (*SimulatedBackend)(nil)
|
var _ bind.ContractBackend = (*SimulatedBackend)(nil)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
errBlockNumberUnsupported = errors.New("simulatedBackend cannot access blocks other than the latest block")
|
errBlockNumberUnsupported = errors.New("simulatedBackend cannot access blocks other than the latest block")
|
||||||
errGasEstimationFailed = errors.New("gas required exceeds allowance or always failing transaction")
|
errBlockDoesNotExist = errors.New("block does not exist in blockchain")
|
||||||
|
errTransactionDoesNotExist = errors.New("transaction does not exist")
|
||||||
)
|
)
|
||||||
|
|
||||||
// SimulatedBackend implements bind.ContractBackend, simulating a blockchain in
|
// SimulatedBackend implements bind.ContractBackend, simulating a blockchain in
|
||||||
// the background. Its main purpose is to allow easily testing contract bindings.
|
// the background. Its main purpose is to allow for easy testing of contract bindings.
|
||||||
|
// Simulated backend implements the following interfaces:
|
||||||
|
// ChainReader, ChainStateReader, ContractBackend, ContractCaller, ContractFilterer, ContractTransactor,
|
||||||
|
// DeployBackend, GasEstimator, GasPricer, LogFilterer, PendingContractCaller, TransactionReader, and TransactionSender
|
||||||
type SimulatedBackend struct {
|
type SimulatedBackend struct {
|
||||||
database ethdb.Database // In memory database to store our testing data
|
database ethdb.Database // In memory database to store our testing data
|
||||||
blockchain *core.BlockChain // Ethereum blockchain to handle the consensus
|
blockchain *core.BlockChain // Ethereum blockchain to handle the consensus
|
||||||
|
|
||||||
mu sync.Mutex
|
mu sync.Mutex
|
||||||
pendingBlock *types.Block // Currently pending block that will be imported on request
|
pendingBlock *types.Block // Currently pending block that will be imported on request
|
||||||
pendingState *state.StateDB // Currently pending state that will be the active on on request
|
pendingState *state.StateDB // Currently pending state that will be the active on request
|
||||||
|
|
||||||
events *filters.EventSystem // Event system for filtering log events live
|
events *filters.EventSystem // Event system for filtering log events live
|
||||||
|
|
||||||
@ -67,23 +74,25 @@ type SimulatedBackend struct {
|
|||||||
|
|
||||||
// NewSimulatedBackendWithDatabase creates a new binding backend based on the given database
|
// NewSimulatedBackendWithDatabase creates a new binding backend based on the given database
|
||||||
// and uses a simulated blockchain for testing purposes.
|
// and uses a simulated blockchain for testing purposes.
|
||||||
|
// A simulated backend always uses chainID 1337.
|
||||||
func NewSimulatedBackendWithDatabase(database ethdb.Database, alloc core.GenesisAlloc, gasLimit uint64) *SimulatedBackend {
|
func NewSimulatedBackendWithDatabase(database ethdb.Database, alloc core.GenesisAlloc, gasLimit uint64) *SimulatedBackend {
|
||||||
genesis := core.Genesis{Config: params.AllEthashProtocolChanges, GasLimit: gasLimit, Alloc: alloc}
|
genesis := core.Genesis{Config: params.AllEthashProtocolChanges, GasLimit: gasLimit, Alloc: alloc}
|
||||||
genesis.MustCommit(database)
|
genesis.MustCommit(database)
|
||||||
blockchain, _ := core.NewBlockChain(database, nil, genesis.Config, ethash.NewFaker(), vm.Config{}, nil)
|
blockchain, _ := core.NewBlockChain(database, nil, genesis.Config, ethash.NewFaker(), vm.Config{}, nil, nil)
|
||||||
|
|
||||||
backend := &SimulatedBackend{
|
backend := &SimulatedBackend{
|
||||||
database: database,
|
database: database,
|
||||||
blockchain: blockchain,
|
blockchain: blockchain,
|
||||||
config: genesis.Config,
|
config: genesis.Config,
|
||||||
events: filters.NewEventSystem(new(event.TypeMux), &filterBackend{database, blockchain}, false),
|
events: filters.NewEventSystem(&filterBackend{database, blockchain}, false),
|
||||||
}
|
}
|
||||||
backend.rollback()
|
backend.rollback(blockchain.CurrentBlock())
|
||||||
return backend
|
return backend
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewSimulatedBackend creates a new binding backend using a simulated blockchain
|
// NewSimulatedBackend creates a new binding backend using a simulated blockchain
|
||||||
// for testing purposes.
|
// for testing purposes.
|
||||||
|
// A simulated backend always uses chainID 1337.
|
||||||
func NewSimulatedBackend(alloc core.GenesisAlloc, gasLimit uint64) *SimulatedBackend {
|
func NewSimulatedBackend(alloc core.GenesisAlloc, gasLimit uint64) *SimulatedBackend {
|
||||||
return NewSimulatedBackendWithDatabase(rawdb.NewMemoryDatabase(), alloc, gasLimit)
|
return NewSimulatedBackendWithDatabase(rawdb.NewMemoryDatabase(), alloc, gasLimit)
|
||||||
}
|
}
|
||||||
@ -103,7 +112,9 @@ func (b *SimulatedBackend) Commit() {
|
|||||||
if _, err := b.blockchain.InsertChain([]*types.Block{b.pendingBlock}); err != nil {
|
if _, err := b.blockchain.InsertChain([]*types.Block{b.pendingBlock}); err != nil {
|
||||||
panic(err) // This cannot happen unless the simulator is wrong, fail in that case
|
panic(err) // This cannot happen unless the simulator is wrong, fail in that case
|
||||||
}
|
}
|
||||||
b.rollback()
|
// Using the last inserted block here makes it possible to build on a side
|
||||||
|
// chain after a fork.
|
||||||
|
b.rollback(b.pendingBlock)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rollback aborts all pending transactions, reverting to the last committed state.
|
// Rollback aborts all pending transactions, reverting to the last committed state.
|
||||||
@ -111,15 +122,53 @@ func (b *SimulatedBackend) Rollback() {
|
|||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
b.rollback()
|
b.rollback(b.blockchain.CurrentBlock())
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *SimulatedBackend) rollback() {
|
func (b *SimulatedBackend) rollback(parent *types.Block) {
|
||||||
blocks, _ := core.GenerateChain(b.config, b.blockchain.CurrentBlock(), ethash.NewFaker(), b.database, 1, func(int, *core.BlockGen) {})
|
blocks, _ := core.GenerateChain(b.config, parent, ethash.NewFaker(), b.database, 1, func(int, *core.BlockGen) {})
|
||||||
statedb, _ := b.blockchain.State()
|
|
||||||
|
|
||||||
b.pendingBlock = blocks[0]
|
b.pendingBlock = blocks[0]
|
||||||
b.pendingState, _ = state.New(b.pendingBlock.Root(), statedb.Database())
|
b.pendingState, _ = state.New(b.pendingBlock.Root(), b.blockchain.StateCache(), nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fork creates a side-chain that can be used to simulate reorgs.
|
||||||
|
//
|
||||||
|
// This function should be called with the ancestor block where the new side
|
||||||
|
// chain should be started. Transactions (old and new) can then be applied on
|
||||||
|
// top and Commit-ed.
|
||||||
|
//
|
||||||
|
// Note, the side-chain will only become canonical (and trigger the events) when
|
||||||
|
// it becomes longer. Until then CallContract will still operate on the current
|
||||||
|
// canonical chain.
|
||||||
|
//
|
||||||
|
// There is a % chance that the side chain becomes canonical at the same length
|
||||||
|
// to simulate live network behavior.
|
||||||
|
func (b *SimulatedBackend) Fork(ctx context.Context, parent common.Hash) error {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if len(b.pendingBlock.Transactions()) != 0 {
|
||||||
|
return errors.New("pending block dirty")
|
||||||
|
}
|
||||||
|
block, err := b.blockByHash(ctx, parent)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
b.rollback(block)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// stateByBlockNumber retrieves a state by a given blocknumber.
|
||||||
|
func (b *SimulatedBackend) stateByBlockNumber(ctx context.Context, blockNumber *big.Int) (*state.StateDB, error) {
|
||||||
|
if blockNumber == nil || blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) == 0 {
|
||||||
|
return b.blockchain.State()
|
||||||
|
}
|
||||||
|
block, err := b.blockByNumber(ctx, blockNumber)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return b.blockchain.StateAt(block.Root())
|
||||||
}
|
}
|
||||||
|
|
||||||
// CodeAt returns the code associated with a certain account in the blockchain.
|
// CodeAt returns the code associated with a certain account in the blockchain.
|
||||||
@ -127,11 +176,12 @@ func (b *SimulatedBackend) CodeAt(ctx context.Context, contract common.Address,
|
|||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
if blockNumber != nil && blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) != 0 {
|
stateDB, err := b.stateByBlockNumber(ctx, blockNumber)
|
||||||
return nil, errBlockNumberUnsupported
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
statedb, _ := b.blockchain.State()
|
|
||||||
return statedb.GetCode(contract), nil
|
return stateDB.GetCode(contract), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// BalanceAt returns the wei balance of a certain account in the blockchain.
|
// BalanceAt returns the wei balance of a certain account in the blockchain.
|
||||||
@ -139,11 +189,12 @@ func (b *SimulatedBackend) BalanceAt(ctx context.Context, contract common.Addres
|
|||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
if blockNumber != nil && blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) != 0 {
|
stateDB, err := b.stateByBlockNumber(ctx, blockNumber)
|
||||||
return nil, errBlockNumberUnsupported
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
statedb, _ := b.blockchain.State()
|
|
||||||
return statedb.GetBalance(contract), nil
|
return stateDB.GetBalance(contract), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// NonceAt returns the nonce of a certain account in the blockchain.
|
// NonceAt returns the nonce of a certain account in the blockchain.
|
||||||
@ -151,11 +202,12 @@ func (b *SimulatedBackend) NonceAt(ctx context.Context, contract common.Address,
|
|||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
if blockNumber != nil && blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) != 0 {
|
stateDB, err := b.stateByBlockNumber(ctx, blockNumber)
|
||||||
return 0, errBlockNumberUnsupported
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
}
|
}
|
||||||
statedb, _ := b.blockchain.State()
|
|
||||||
return statedb.GetNonce(contract), nil
|
return stateDB.GetNonce(contract), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// StorageAt returns the value of key in the storage of an account in the blockchain.
|
// StorageAt returns the value of key in the storage of an account in the blockchain.
|
||||||
@ -163,16 +215,20 @@ func (b *SimulatedBackend) StorageAt(ctx context.Context, contract common.Addres
|
|||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
if blockNumber != nil && blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) != 0 {
|
stateDB, err := b.stateByBlockNumber(ctx, blockNumber)
|
||||||
return nil, errBlockNumberUnsupported
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
statedb, _ := b.blockchain.State()
|
|
||||||
val := statedb.GetState(contract, key)
|
val := stateDB.GetState(contract, key)
|
||||||
return val[:], nil
|
return val[:], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// TransactionReceipt returns the receipt of a transaction.
|
// TransactionReceipt returns the receipt of a transaction.
|
||||||
func (b *SimulatedBackend) TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error) {
|
func (b *SimulatedBackend) TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
receipt, _, _, _ := rawdb.ReadReceipt(b.database, txHash, b.config)
|
receipt, _, _, _ := rawdb.ReadReceipt(b.database, txHash, b.config)
|
||||||
return receipt, nil
|
return receipt, nil
|
||||||
}
|
}
|
||||||
@ -196,6 +252,126 @@ func (b *SimulatedBackend) TransactionByHash(ctx context.Context, txHash common.
|
|||||||
return nil, false, ethereum.NotFound
|
return nil, false, ethereum.NotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// BlockByHash retrieves a block based on the block hash.
|
||||||
|
func (b *SimulatedBackend) BlockByHash(ctx context.Context, hash common.Hash) (*types.Block, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
return b.blockByHash(ctx, hash)
|
||||||
|
}
|
||||||
|
|
||||||
|
// blockByHash retrieves a block based on the block hash without Locking.
|
||||||
|
func (b *SimulatedBackend) blockByHash(ctx context.Context, hash common.Hash) (*types.Block, error) {
|
||||||
|
if hash == b.pendingBlock.Hash() {
|
||||||
|
return b.pendingBlock, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
block := b.blockchain.GetBlockByHash(hash)
|
||||||
|
if block != nil {
|
||||||
|
return block, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errBlockDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
// BlockByNumber retrieves a block from the database by number, caching it
|
||||||
|
// (associated with its hash) if found.
|
||||||
|
func (b *SimulatedBackend) BlockByNumber(ctx context.Context, number *big.Int) (*types.Block, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
return b.blockByNumber(ctx, number)
|
||||||
|
}
|
||||||
|
|
||||||
|
// blockByNumber retrieves a block from the database by number, caching it
|
||||||
|
// (associated with its hash) if found without Lock.
|
||||||
|
func (b *SimulatedBackend) blockByNumber(ctx context.Context, number *big.Int) (*types.Block, error) {
|
||||||
|
if number == nil || number.Cmp(b.pendingBlock.Number()) == 0 {
|
||||||
|
return b.blockchain.CurrentBlock(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
block := b.blockchain.GetBlockByNumber(uint64(number.Int64()))
|
||||||
|
if block == nil {
|
||||||
|
return nil, errBlockDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
return block, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// HeaderByHash returns a block header from the current canonical chain.
|
||||||
|
func (b *SimulatedBackend) HeaderByHash(ctx context.Context, hash common.Hash) (*types.Header, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if hash == b.pendingBlock.Hash() {
|
||||||
|
return b.pendingBlock.Header(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
header := b.blockchain.GetHeaderByHash(hash)
|
||||||
|
if header == nil {
|
||||||
|
return nil, errBlockDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
return header, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// HeaderByNumber returns a block header from the current canonical chain. If number is
|
||||||
|
// nil, the latest known header is returned.
|
||||||
|
func (b *SimulatedBackend) HeaderByNumber(ctx context.Context, block *big.Int) (*types.Header, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if block == nil || block.Cmp(b.pendingBlock.Number()) == 0 {
|
||||||
|
return b.blockchain.CurrentHeader(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return b.blockchain.GetHeaderByNumber(uint64(block.Int64())), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransactionCount returns the number of transactions in a given block.
|
||||||
|
func (b *SimulatedBackend) TransactionCount(ctx context.Context, blockHash common.Hash) (uint, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if blockHash == b.pendingBlock.Hash() {
|
||||||
|
return uint(b.pendingBlock.Transactions().Len()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
block := b.blockchain.GetBlockByHash(blockHash)
|
||||||
|
if block == nil {
|
||||||
|
return uint(0), errBlockDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
return uint(block.Transactions().Len()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TransactionInBlock returns the transaction for a specific block at a specific index.
|
||||||
|
func (b *SimulatedBackend) TransactionInBlock(ctx context.Context, blockHash common.Hash, index uint) (*types.Transaction, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if blockHash == b.pendingBlock.Hash() {
|
||||||
|
transactions := b.pendingBlock.Transactions()
|
||||||
|
if uint(len(transactions)) < index+1 {
|
||||||
|
return nil, errTransactionDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
return transactions[index], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
block := b.blockchain.GetBlockByHash(blockHash)
|
||||||
|
if block == nil {
|
||||||
|
return nil, errBlockDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
transactions := block.Transactions()
|
||||||
|
if uint(len(transactions)) < index+1 {
|
||||||
|
return nil, errTransactionDoesNotExist
|
||||||
|
}
|
||||||
|
|
||||||
|
return transactions[index], nil
|
||||||
|
}
|
||||||
|
|
||||||
// PendingCodeAt returns the code associated with an account in the pending state.
|
// PendingCodeAt returns the code associated with an account in the pending state.
|
||||||
func (b *SimulatedBackend) PendingCodeAt(ctx context.Context, contract common.Address) ([]byte, error) {
|
func (b *SimulatedBackend) PendingCodeAt(ctx context.Context, contract common.Address) ([]byte, error) {
|
||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
@ -204,6 +380,36 @@ func (b *SimulatedBackend) PendingCodeAt(ctx context.Context, contract common.Ad
|
|||||||
return b.pendingState.GetCode(contract), nil
|
return b.pendingState.GetCode(contract), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func newRevertError(result *core.ExecutionResult) *revertError {
|
||||||
|
reason, errUnpack := abi.UnpackRevert(result.Revert())
|
||||||
|
err := errors.New("execution reverted")
|
||||||
|
if errUnpack == nil {
|
||||||
|
err = fmt.Errorf("execution reverted: %v", reason)
|
||||||
|
}
|
||||||
|
return &revertError{
|
||||||
|
error: err,
|
||||||
|
reason: hexutil.Encode(result.Revert()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// revertError is an API error that encompasses an EVM revert with JSON error
|
||||||
|
// code and a binary data blob.
|
||||||
|
type revertError struct {
|
||||||
|
error
|
||||||
|
reason string // revert reason hex encoded
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCode returns the JSON error code for a revert.
|
||||||
|
// See: https://github.com/ethereum/wiki/wiki/JSON-RPC-Error-Codes-Improvement-Proposal
|
||||||
|
func (e *revertError) ErrorCode() int {
|
||||||
|
return 3
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorData returns the hex encoded revert reason.
|
||||||
|
func (e *revertError) ErrorData() interface{} {
|
||||||
|
return e.reason
|
||||||
|
}
|
||||||
|
|
||||||
// CallContract executes a contract call.
|
// CallContract executes a contract call.
|
||||||
func (b *SimulatedBackend) CallContract(ctx context.Context, call ethereum.CallMsg, blockNumber *big.Int) ([]byte, error) {
|
func (b *SimulatedBackend) CallContract(ctx context.Context, call ethereum.CallMsg, blockNumber *big.Int) ([]byte, error) {
|
||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
@ -212,12 +418,19 @@ func (b *SimulatedBackend) CallContract(ctx context.Context, call ethereum.CallM
|
|||||||
if blockNumber != nil && blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) != 0 {
|
if blockNumber != nil && blockNumber.Cmp(b.blockchain.CurrentBlock().Number()) != 0 {
|
||||||
return nil, errBlockNumberUnsupported
|
return nil, errBlockNumberUnsupported
|
||||||
}
|
}
|
||||||
state, err := b.blockchain.State()
|
stateDB, err := b.blockchain.State()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
rval, _, _, err := b.callContract(ctx, call, b.blockchain.CurrentBlock(), state)
|
res, err := b.callContract(ctx, call, b.blockchain.CurrentBlock(), stateDB)
|
||||||
return rval, err
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// If the result contains a revert reason, try to unpack and return it.
|
||||||
|
if len(res.Revert()) > 0 {
|
||||||
|
return nil, newRevertError(res)
|
||||||
|
}
|
||||||
|
return res.Return(), res.Err
|
||||||
}
|
}
|
||||||
|
|
||||||
// PendingCallContract executes a contract call on the pending state.
|
// PendingCallContract executes a contract call on the pending state.
|
||||||
@ -226,8 +439,15 @@ func (b *SimulatedBackend) PendingCallContract(ctx context.Context, call ethereu
|
|||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
defer b.pendingState.RevertToSnapshot(b.pendingState.Snapshot())
|
defer b.pendingState.RevertToSnapshot(b.pendingState.Snapshot())
|
||||||
|
|
||||||
rval, _, _, err := b.callContract(ctx, call, b.pendingBlock, b.pendingState)
|
res, err := b.callContract(ctx, call, b.pendingBlock, b.pendingState)
|
||||||
return rval, err
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// If the result contains a revert reason, try to unpack and return it.
|
||||||
|
if len(res.Revert()) > 0 {
|
||||||
|
return nil, newRevertError(res)
|
||||||
|
}
|
||||||
|
return res.Return(), res.Err
|
||||||
}
|
}
|
||||||
|
|
||||||
// PendingNonceAt implements PendingStateReader.PendingNonceAt, retrieving
|
// PendingNonceAt implements PendingStateReader.PendingNonceAt, retrieving
|
||||||
@ -242,6 +462,18 @@ func (b *SimulatedBackend) PendingNonceAt(ctx context.Context, account common.Ad
|
|||||||
// SuggestGasPrice implements ContractTransactor.SuggestGasPrice. Since the simulated
|
// SuggestGasPrice implements ContractTransactor.SuggestGasPrice. Since the simulated
|
||||||
// chain doesn't have miners, we just return a gas price of 1 for any call.
|
// chain doesn't have miners, we just return a gas price of 1 for any call.
|
||||||
func (b *SimulatedBackend) SuggestGasPrice(ctx context.Context) (*big.Int, error) {
|
func (b *SimulatedBackend) SuggestGasPrice(ctx context.Context) (*big.Int, error) {
|
||||||
|
b.mu.Lock()
|
||||||
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if b.pendingBlock.Header().BaseFee != nil {
|
||||||
|
return b.pendingBlock.Header().BaseFee, nil
|
||||||
|
}
|
||||||
|
return big.NewInt(1), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SuggestGasTipCap implements ContractTransactor.SuggestGasTipCap. Since the simulated
|
||||||
|
// chain doesn't have miners, we just return a gas tip of 1 for any call.
|
||||||
|
func (b *SimulatedBackend) SuggestGasTipCap(ctx context.Context) (*big.Int, error) {
|
||||||
return big.NewInt(1), nil
|
return big.NewInt(1), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -262,25 +494,68 @@ func (b *SimulatedBackend) EstimateGas(ctx context.Context, call ethereum.CallMs
|
|||||||
} else {
|
} else {
|
||||||
hi = b.pendingBlock.GasLimit()
|
hi = b.pendingBlock.GasLimit()
|
||||||
}
|
}
|
||||||
|
// Normalize the max fee per gas the call is willing to spend.
|
||||||
|
var feeCap *big.Int
|
||||||
|
if call.GasPrice != nil && (call.GasFeeCap != nil || call.GasTipCap != nil) {
|
||||||
|
return 0, errors.New("both gasPrice and (maxFeePerGas or maxPriorityFeePerGas) specified")
|
||||||
|
} else if call.GasPrice != nil {
|
||||||
|
feeCap = call.GasPrice
|
||||||
|
} else if call.GasFeeCap != nil {
|
||||||
|
feeCap = call.GasFeeCap
|
||||||
|
} else {
|
||||||
|
feeCap = common.Big0
|
||||||
|
}
|
||||||
|
// Recap the highest gas allowance with account's balance.
|
||||||
|
if feeCap.BitLen() != 0 {
|
||||||
|
balance := b.pendingState.GetBalance(call.From) // from can't be nil
|
||||||
|
available := new(big.Int).Set(balance)
|
||||||
|
if call.Value != nil {
|
||||||
|
if call.Value.Cmp(available) >= 0 {
|
||||||
|
return 0, errors.New("insufficient funds for transfer")
|
||||||
|
}
|
||||||
|
available.Sub(available, call.Value)
|
||||||
|
}
|
||||||
|
allowance := new(big.Int).Div(available, feeCap)
|
||||||
|
if allowance.IsUint64() && hi > allowance.Uint64() {
|
||||||
|
transfer := call.Value
|
||||||
|
if transfer == nil {
|
||||||
|
transfer = new(big.Int)
|
||||||
|
}
|
||||||
|
log.Warn("Gas estimation capped by limited funds", "original", hi, "balance", balance,
|
||||||
|
"sent", transfer, "feecap", feeCap, "fundable", allowance)
|
||||||
|
hi = allowance.Uint64()
|
||||||
|
}
|
||||||
|
}
|
||||||
cap = hi
|
cap = hi
|
||||||
|
|
||||||
// Create a helper to check if a gas allowance results in an executable transaction
|
// Create a helper to check if a gas allowance results in an executable transaction
|
||||||
executable := func(gas uint64) bool {
|
executable := func(gas uint64) (bool, *core.ExecutionResult, error) {
|
||||||
call.Gas = gas
|
call.Gas = gas
|
||||||
|
|
||||||
snapshot := b.pendingState.Snapshot()
|
snapshot := b.pendingState.Snapshot()
|
||||||
_, _, failed, err := b.callContract(ctx, call, b.pendingBlock, b.pendingState)
|
res, err := b.callContract(ctx, call, b.pendingBlock, b.pendingState)
|
||||||
b.pendingState.RevertToSnapshot(snapshot)
|
b.pendingState.RevertToSnapshot(snapshot)
|
||||||
|
|
||||||
if err != nil || failed {
|
if err != nil {
|
||||||
return false
|
if errors.Is(err, core.ErrIntrinsicGas) {
|
||||||
|
return true, nil, nil // Special case, raise gas limit
|
||||||
|
}
|
||||||
|
return true, nil, err // Bail out
|
||||||
}
|
}
|
||||||
return true
|
return res.Failed(), res, nil
|
||||||
}
|
}
|
||||||
// Execute the binary search and hone in on an executable gas limit
|
// Execute the binary search and hone in on an executable gas limit
|
||||||
for lo+1 < hi {
|
for lo+1 < hi {
|
||||||
mid := (hi + lo) / 2
|
mid := (hi + lo) / 2
|
||||||
if !executable(mid) {
|
failed, _, err := executable(mid)
|
||||||
|
|
||||||
|
// If the error is not nil(consensus error), it means the provided message
|
||||||
|
// call or transaction will never be accepted no matter how much gas it is
|
||||||
|
// assigned. Return the error directly, don't struggle any more
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if failed {
|
||||||
lo = mid
|
lo = mid
|
||||||
} else {
|
} else {
|
||||||
hi = mid
|
hi = mid
|
||||||
@ -288,8 +563,19 @@ func (b *SimulatedBackend) EstimateGas(ctx context.Context, call ethereum.CallMs
|
|||||||
}
|
}
|
||||||
// Reject the transaction as invalid if it still fails at the highest allowance
|
// Reject the transaction as invalid if it still fails at the highest allowance
|
||||||
if hi == cap {
|
if hi == cap {
|
||||||
if !executable(hi) {
|
failed, result, err := executable(hi)
|
||||||
return 0, errGasEstimationFailed
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if failed {
|
||||||
|
if result != nil && result.Err != vm.ErrOutOfGas {
|
||||||
|
if len(result.Revert()) > 0 {
|
||||||
|
return 0, newRevertError(result)
|
||||||
|
}
|
||||||
|
return 0, result.Err
|
||||||
|
}
|
||||||
|
// Otherwise, the specified gas cap is too low
|
||||||
|
return 0, fmt.Errorf("gas required exceeds allowance (%d)", cap)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return hi, nil
|
return hi, nil
|
||||||
@ -297,11 +583,39 @@ func (b *SimulatedBackend) EstimateGas(ctx context.Context, call ethereum.CallMs
|
|||||||
|
|
||||||
// callContract implements common code between normal and pending contract calls.
|
// callContract implements common code between normal and pending contract calls.
|
||||||
// state is modified during execution, make sure to copy it if necessary.
|
// state is modified during execution, make sure to copy it if necessary.
|
||||||
func (b *SimulatedBackend) callContract(ctx context.Context, call ethereum.CallMsg, block *types.Block, statedb *state.StateDB) ([]byte, uint64, bool, error) {
|
func (b *SimulatedBackend) callContract(ctx context.Context, call ethereum.CallMsg, block *types.Block, stateDB *state.StateDB) (*core.ExecutionResult, error) {
|
||||||
// Ensure message is initialized properly.
|
// Gas prices post 1559 need to be initialized
|
||||||
if call.GasPrice == nil {
|
if call.GasPrice != nil && (call.GasFeeCap != nil || call.GasTipCap != nil) {
|
||||||
call.GasPrice = big.NewInt(1)
|
return nil, errors.New("both gasPrice and (maxFeePerGas or maxPriorityFeePerGas) specified")
|
||||||
}
|
}
|
||||||
|
head := b.blockchain.CurrentHeader()
|
||||||
|
if !b.blockchain.Config().IsLondon(head.Number) {
|
||||||
|
// If there's no basefee, then it must be a non-1559 execution
|
||||||
|
if call.GasPrice == nil {
|
||||||
|
call.GasPrice = new(big.Int)
|
||||||
|
}
|
||||||
|
call.GasFeeCap, call.GasTipCap = call.GasPrice, call.GasPrice
|
||||||
|
} else {
|
||||||
|
// A basefee is provided, necessitating 1559-type execution
|
||||||
|
if call.GasPrice != nil {
|
||||||
|
// User specified the legacy gas field, convert to 1559 gas typing
|
||||||
|
call.GasFeeCap, call.GasTipCap = call.GasPrice, call.GasPrice
|
||||||
|
} else {
|
||||||
|
// User specified 1559 gas feilds (or none), use those
|
||||||
|
if call.GasFeeCap == nil {
|
||||||
|
call.GasFeeCap = new(big.Int)
|
||||||
|
}
|
||||||
|
if call.GasTipCap == nil {
|
||||||
|
call.GasTipCap = new(big.Int)
|
||||||
|
}
|
||||||
|
// Backfill the legacy gasPrice for EVM execution, unless we're all zeroes
|
||||||
|
call.GasPrice = new(big.Int)
|
||||||
|
if call.GasFeeCap.BitLen() > 0 || call.GasTipCap.BitLen() > 0 {
|
||||||
|
call.GasPrice = math.BigMin(new(big.Int).Add(call.GasTipCap, head.BaseFee), call.GasFeeCap)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Ensure message is initialized properly.
|
||||||
if call.Gas == 0 {
|
if call.Gas == 0 {
|
||||||
call.Gas = 50000000
|
call.Gas = 50000000
|
||||||
}
|
}
|
||||||
@ -309,18 +623,19 @@ func (b *SimulatedBackend) callContract(ctx context.Context, call ethereum.CallM
|
|||||||
call.Value = new(big.Int)
|
call.Value = new(big.Int)
|
||||||
}
|
}
|
||||||
// Set infinite balance to the fake caller account.
|
// Set infinite balance to the fake caller account.
|
||||||
from := statedb.GetOrNewStateObject(call.From)
|
from := stateDB.GetOrNewStateObject(call.From)
|
||||||
from.SetBalance(math.MaxBig256)
|
from.SetBalance(math.MaxBig256)
|
||||||
// Execute the call.
|
// Execute the call.
|
||||||
msg := callmsg{call}
|
msg := callMsg{call}
|
||||||
|
|
||||||
evmContext := core.NewEVMContext(msg, block.Header(), b.blockchain, nil)
|
txContext := core.NewEVMTxContext(msg)
|
||||||
|
evmContext := core.NewEVMBlockContext(block.Header(), b.blockchain, nil)
|
||||||
// Create a new environment which holds all relevant information
|
// Create a new environment which holds all relevant information
|
||||||
// about the transaction and calling mechanisms.
|
// about the transaction and calling mechanisms.
|
||||||
vmenv := vm.NewEVM(evmContext, statedb, b.config, vm.Config{})
|
vmEnv := vm.NewEVM(evmContext, txContext, stateDB, b.config, vm.Config{NoBaseFee: true})
|
||||||
gaspool := new(core.GasPool).AddGas(math.MaxUint64)
|
gasPool := new(core.GasPool).AddGas(math.MaxUint64)
|
||||||
|
|
||||||
return core.NewStateTransition(vmenv, msg, gaspool).TransitionDb()
|
return core.NewStateTransition(vmEnv, msg, gasPool).TransitionDb()
|
||||||
}
|
}
|
||||||
|
|
||||||
// SendTransaction updates the pending block to include the given transaction.
|
// SendTransaction updates the pending block to include the given transaction.
|
||||||
@ -329,7 +644,14 @@ func (b *SimulatedBackend) SendTransaction(ctx context.Context, tx *types.Transa
|
|||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
sender, err := types.Sender(types.NewEIP155Signer(b.config.ChainID), tx)
|
// Get the last block
|
||||||
|
block, err := b.blockByHash(ctx, b.pendingBlock.ParentHash())
|
||||||
|
if err != nil {
|
||||||
|
panic("could not fetch parent")
|
||||||
|
}
|
||||||
|
// Check transaction validity
|
||||||
|
signer := types.MakeSigner(b.blockchain.Config(), block.Number())
|
||||||
|
sender, err := types.Sender(signer, tx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(fmt.Errorf("invalid transaction: %v", err))
|
panic(fmt.Errorf("invalid transaction: %v", err))
|
||||||
}
|
}
|
||||||
@ -337,17 +659,17 @@ func (b *SimulatedBackend) SendTransaction(ctx context.Context, tx *types.Transa
|
|||||||
if tx.Nonce() != nonce {
|
if tx.Nonce() != nonce {
|
||||||
panic(fmt.Errorf("invalid transaction nonce: got %d, want %d", tx.Nonce(), nonce))
|
panic(fmt.Errorf("invalid transaction nonce: got %d, want %d", tx.Nonce(), nonce))
|
||||||
}
|
}
|
||||||
|
// Include tx in chain
|
||||||
blocks, _ := core.GenerateChain(b.config, b.blockchain.CurrentBlock(), ethash.NewFaker(), b.database, 1, func(number int, block *core.BlockGen) {
|
blocks, _ := core.GenerateChain(b.config, block, ethash.NewFaker(), b.database, 1, func(number int, block *core.BlockGen) {
|
||||||
for _, tx := range b.pendingBlock.Transactions() {
|
for _, tx := range b.pendingBlock.Transactions() {
|
||||||
block.AddTxWithChain(b.blockchain, tx)
|
block.AddTxWithChain(b.blockchain, tx)
|
||||||
}
|
}
|
||||||
block.AddTxWithChain(b.blockchain, tx)
|
block.AddTxWithChain(b.blockchain, tx)
|
||||||
})
|
})
|
||||||
statedb, _ := b.blockchain.State()
|
stateDB, _ := b.blockchain.State()
|
||||||
|
|
||||||
b.pendingBlock = blocks[0]
|
b.pendingBlock = blocks[0]
|
||||||
b.pendingState, _ = state.New(b.pendingBlock.Root(), statedb.Database())
|
b.pendingState, _ = state.New(b.pendingBlock.Root(), stateDB.Database(), nil)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -361,7 +683,7 @@ func (b *SimulatedBackend) FilterLogs(ctx context.Context, query ethereum.Filter
|
|||||||
// Block filter requested, construct a single-shot filter
|
// Block filter requested, construct a single-shot filter
|
||||||
filter = filters.NewBlockFilter(&filterBackend{b.database, b.blockchain}, *query.BlockHash, query.Addresses, query.Topics)
|
filter = filters.NewBlockFilter(&filterBackend{b.database, b.blockchain}, *query.BlockHash, query.Addresses, query.Topics)
|
||||||
} else {
|
} else {
|
||||||
// Initialize unset filter boundaried to run from genesis to chain head
|
// Initialize unset filter boundaries to run from genesis to chain head
|
||||||
from := int64(0)
|
from := int64(0)
|
||||||
if query.FromBlock != nil {
|
if query.FromBlock != nil {
|
||||||
from = query.FromBlock.Int64()
|
from = query.FromBlock.Int64()
|
||||||
@ -379,8 +701,8 @@ func (b *SimulatedBackend) FilterLogs(ctx context.Context, query ethereum.Filter
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
res := make([]types.Log, len(logs))
|
res := make([]types.Log, len(logs))
|
||||||
for i, log := range logs {
|
for i, nLog := range logs {
|
||||||
res[i] = *log
|
res[i] = *nLog
|
||||||
}
|
}
|
||||||
return res, nil
|
return res, nil
|
||||||
}
|
}
|
||||||
@ -401,9 +723,9 @@ func (b *SimulatedBackend) SubscribeFilterLogs(ctx context.Context, query ethere
|
|||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
case logs := <-sink:
|
case logs := <-sink:
|
||||||
for _, log := range logs {
|
for _, nlog := range logs {
|
||||||
select {
|
select {
|
||||||
case ch <- *log:
|
case ch <- *nlog:
|
||||||
case err := <-sub.Err():
|
case err := <-sub.Err():
|
||||||
return err
|
return err
|
||||||
case <-quit:
|
case <-quit:
|
||||||
@ -419,20 +741,50 @@ func (b *SimulatedBackend) SubscribeFilterLogs(ctx context.Context, query ethere
|
|||||||
}), nil
|
}), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SubscribeNewHead returns an event subscription for a new header.
|
||||||
|
func (b *SimulatedBackend) SubscribeNewHead(ctx context.Context, ch chan<- *types.Header) (ethereum.Subscription, error) {
|
||||||
|
// subscribe to a new head
|
||||||
|
sink := make(chan *types.Header)
|
||||||
|
sub := b.events.SubscribeNewHeads(sink)
|
||||||
|
|
||||||
|
return event.NewSubscription(func(quit <-chan struct{}) error {
|
||||||
|
defer sub.Unsubscribe()
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case head := <-sink:
|
||||||
|
select {
|
||||||
|
case ch <- head:
|
||||||
|
case err := <-sub.Err():
|
||||||
|
return err
|
||||||
|
case <-quit:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
case err := <-sub.Err():
|
||||||
|
return err
|
||||||
|
case <-quit:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
// AdjustTime adds a time shift to the simulated clock.
|
// AdjustTime adds a time shift to the simulated clock.
|
||||||
|
// It can only be called on empty blocks.
|
||||||
func (b *SimulatedBackend) AdjustTime(adjustment time.Duration) error {
|
func (b *SimulatedBackend) AdjustTime(adjustment time.Duration) error {
|
||||||
b.mu.Lock()
|
b.mu.Lock()
|
||||||
defer b.mu.Unlock()
|
defer b.mu.Unlock()
|
||||||
|
|
||||||
|
if len(b.pendingBlock.Transactions()) != 0 {
|
||||||
|
return errors.New("Could not adjust time on non-empty block")
|
||||||
|
}
|
||||||
|
|
||||||
blocks, _ := core.GenerateChain(b.config, b.blockchain.CurrentBlock(), ethash.NewFaker(), b.database, 1, func(number int, block *core.BlockGen) {
|
blocks, _ := core.GenerateChain(b.config, b.blockchain.CurrentBlock(), ethash.NewFaker(), b.database, 1, func(number int, block *core.BlockGen) {
|
||||||
for _, tx := range b.pendingBlock.Transactions() {
|
|
||||||
block.AddTx(tx)
|
|
||||||
}
|
|
||||||
block.OffsetTime(int64(adjustment.Seconds()))
|
block.OffsetTime(int64(adjustment.Seconds()))
|
||||||
})
|
})
|
||||||
statedb, _ := b.blockchain.State()
|
stateDB, _ := b.blockchain.State()
|
||||||
|
|
||||||
b.pendingBlock = blocks[0]
|
b.pendingBlock = blocks[0]
|
||||||
b.pendingState, _ = state.New(b.pendingBlock.Root(), statedb.Database())
|
b.pendingState, _ = state.New(b.pendingBlock.Root(), stateDB.Database(), nil)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -442,19 +794,22 @@ func (b *SimulatedBackend) Blockchain() *core.BlockChain {
 	return b.blockchain
 }
 
-// callmsg implements core.Message to allow passing it as a transaction simulator.
-type callmsg struct {
+// callMsg implements core.Message to allow passing it as a transaction simulator.
+type callMsg struct {
 	ethereum.CallMsg
 }
 
-func (m callmsg) From() common.Address { return m.CallMsg.From }
-func (m callmsg) Nonce() uint64        { return 0 }
-func (m callmsg) CheckNonce() bool     { return false }
-func (m callmsg) To() *common.Address  { return m.CallMsg.To }
-func (m callmsg) GasPrice() *big.Int   { return m.CallMsg.GasPrice }
-func (m callmsg) Gas() uint64          { return m.CallMsg.Gas }
-func (m callmsg) Value() *big.Int      { return m.CallMsg.Value }
-func (m callmsg) Data() []byte         { return m.CallMsg.Data }
+func (m callMsg) From() common.Address         { return m.CallMsg.From }
+func (m callMsg) Nonce() uint64                { return 0 }
+func (m callMsg) IsFake() bool                 { return true }
+func (m callMsg) To() *common.Address          { return m.CallMsg.To }
+func (m callMsg) GasPrice() *big.Int           { return m.CallMsg.GasPrice }
+func (m callMsg) GasFeeCap() *big.Int          { return m.CallMsg.GasFeeCap }
+func (m callMsg) GasTipCap() *big.Int          { return m.CallMsg.GasTipCap }
+func (m callMsg) Gas() uint64                  { return m.CallMsg.Gas }
+func (m callMsg) Value() *big.Int              { return m.CallMsg.Value }
+func (m callMsg) Data() []byte                 { return m.CallMsg.Data }
+func (m callMsg) AccessList() types.AccessList { return m.CallMsg.AccessList }
 
 // filterBackend implements filters.Backend to support filtering for logs without
 // taking bloom-bits acceleration structures into account.
@@ -502,22 +857,34 @@ func (fb *filterBackend) GetLogs(ctx context.Context, hash common.Hash) ([][]*ty
 }
 
 func (fb *filterBackend) SubscribeNewTxsEvent(ch chan<- core.NewTxsEvent) event.Subscription {
+	return nullSubscription()
+}
+
+func (fb *filterBackend) SubscribeChainEvent(ch chan<- core.ChainEvent) event.Subscription {
+	return fb.bc.SubscribeChainEvent(ch)
+}
+
+func (fb *filterBackend) SubscribeRemovedLogsEvent(ch chan<- core.RemovedLogsEvent) event.Subscription {
+	return fb.bc.SubscribeRemovedLogsEvent(ch)
+}
+
+func (fb *filterBackend) SubscribeLogsEvent(ch chan<- []*types.Log) event.Subscription {
+	return fb.bc.SubscribeLogsEvent(ch)
+}
+
+func (fb *filterBackend) SubscribePendingLogsEvent(ch chan<- []*types.Log) event.Subscription {
+	return nullSubscription()
+}
+
+func (fb *filterBackend) BloomStatus() (uint64, uint64) { return 4096, 0 }
+
+func (fb *filterBackend) ServiceFilter(ctx context.Context, ms *bloombits.MatcherSession) {
+	panic("not supported")
+}
+
+func nullSubscription() event.Subscription {
 	return event.NewSubscription(func(quit <-chan struct{}) error {
 		<-quit
 		return nil
 	})
 }
-func (fb *filterBackend) SubscribeChainEvent(ch chan<- core.ChainEvent) event.Subscription {
-	return fb.bc.SubscribeChainEvent(ch)
-}
-func (fb *filterBackend) SubscribeRemovedLogsEvent(ch chan<- core.RemovedLogsEvent) event.Subscription {
-	return fb.bc.SubscribeRemovedLogsEvent(ch)
-}
-func (fb *filterBackend) SubscribeLogsEvent(ch chan<- []*types.Log) event.Subscription {
-	return fb.bc.SubscribeLogsEvent(ch)
-}
-
-func (fb *filterBackend) BloomStatus() (uint64, uint64) { return 4096, 0 }
-func (fb *filterBackend) ServiceFilter(ctx context.Context, ms *bloombits.MatcherSession) {
-	panic("not supported")
-}
(File diff suppressed because it is too large.)
@@ -21,6 +21,8 @@ import (
 	"errors"
 	"fmt"
 	"math/big"
+	"strings"
+	"sync"
 
 	"github.com/ethereum/go-ethereum"
 	"github.com/ethereum/go-ethereum/accounts/abi"
@@ -32,7 +34,7 @@ import (
 
 // SignerFn is a signer function callback when a contract requires a method to
 // sign the transaction before submission.
-type SignerFn func(types.Signer, common.Address, *types.Transaction) (*types.Transaction, error)
+type SignerFn func(common.Address, *types.Transaction) (*types.Transaction, error)
 
 // CallOpts is the collection of options to fine tune a contract call request.
 type CallOpts struct {
@@ -49,11 +51,15 @@ type TransactOpts struct {
 	Nonce  *big.Int // Nonce to use for the transaction execution (nil = use pending state)
 	Signer SignerFn // Method to use for signing the transaction (mandatory)
 
-	Value    *big.Int // Funds to transfer along along the transaction (nil = 0 = no funds)
+	Value     *big.Int // Funds to transfer along the transaction (nil = 0 = no funds)
 	GasPrice *big.Int // Gas price to use for the transaction execution (nil = gas price oracle)
-	GasLimit uint64   // Gas limit to set for the transaction execution (0 = estimate)
+	GasFeeCap *big.Int // Gas fee cap to use for the 1559 transaction execution (nil = gas price oracle)
+	GasTipCap *big.Int // Gas priority fee cap to use for the 1559 transaction execution (nil = gas price oracle)
+	GasLimit  uint64   // Gas limit to set for the transaction execution (0 = estimate)
 
 	Context context.Context // Network context to support cancellation and timeouts (nil = no timeout)
 
+	NoSend bool // Do all transact steps but do not send the transaction
 }
 
 // FilterOpts is the collection of options to fine tune filtering for events
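Illustrative note (not from the diff): the new fields let callers opt into EIP-1559 pricing or defer sending. The identifiers below (auth, token, recipient, amount) are assumed to exist in the calling code.

// Hedged sketch of the new TransactOpts knobs.
auth.GasPrice = nil                         // legacy GasPrice and the 1559 caps are mutually exclusive
auth.GasTipCap = big.NewInt(2_000_000_000)  // 2 gwei priority fee
auth.GasFeeCap = big.NewInt(80_000_000_000) // overall ceiling; must be >= GasTipCap
auth.NoSend = true                          // build and sign only; the caller broadcasts the tx itself
tx, err := token.Transfer(auth, recipient, amount)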
@@ -72,6 +78,29 @@ type WatchOpts struct {
 	Context context.Context // Network context to support cancellation and timeouts (nil = no timeout)
 }
 
+// MetaData collects all metadata for a bound contract.
+type MetaData struct {
+	mu   sync.Mutex
+	Sigs map[string]string
+	Bin  string
+	ABI  string
+	ab   *abi.ABI
+}
+
+func (m *MetaData) GetAbi() (*abi.ABI, error) {
+	m.mu.Lock()
+	defer m.mu.Unlock()
+	if m.ab != nil {
+		return m.ab, nil
+	}
+	if parsed, err := abi.JSON(strings.NewReader(m.ABI)); err != nil {
+		return nil, err
+	} else {
+		m.ab = &parsed
+	}
+	return m.ab, nil
+}
+
 // BoundContract is the base wrapper object that reflects a contract on the
 // Ethereum network. It contains a collection of methods that are used by the
 // higher level contract bindings to operate.
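Illustrative note (not from the diff): generated packages expose a *bind.MetaData value, and GetAbi parses the embedded JSON once and caches it. TokenMetaData is a hypothetical generated name.

// Hedged sketch: reuse the cached ABI from a generated MetaData value.
parsed, err := TokenMetaData.GetAbi()
if err != nil {
	log.Fatal(err)
}
transferID := parsed.Methods["transfer"].ID // 4-byte selector, no re-parsing of the JSON
_ = transferID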
@@ -117,11 +146,14 @@ func DeployContract(opts *TransactOpts, abi abi.ABI, bytecode []byte, backend Co
 // sets the output to result. The result type might be a single field for simple
 // returns, a slice of interfaces for anonymous returns and a struct for named
 // returns.
-func (c *BoundContract) Call(opts *CallOpts, result interface{}, method string, params ...interface{}) error {
+func (c *BoundContract) Call(opts *CallOpts, results *[]interface{}, method string, params ...interface{}) error {
 	// Don't crash on a lazy user
 	if opts == nil {
 		opts = new(CallOpts)
 	}
+	if results == nil {
+		results = new([]interface{})
+	}
 	// Pack the input, call and unpack the results
 	input, err := c.abi.Pack(method, params...)
 	if err != nil {
@@ -149,7 +181,10 @@ func (c *BoundContract) Call(opts *CallOpts, result interface{}, method string,
 		}
 	} else {
 		output, err = c.caller.CallContract(ctx, msg, opts.BlockNumber)
-		if err == nil && len(output) == 0 {
+		if err != nil {
+			return err
+		}
+		if len(output) == 0 {
 			// Make sure we have a contract to operate on, and bail out otherwise.
 			if code, err = c.caller.CodeAt(ctx, c.address, opts.BlockNumber); err != nil {
 				return err
@@ -158,10 +193,14 @@ func (c *BoundContract) Call(opts *CallOpts, result interface{}, method string,
 			}
 		}
 	}
-	if err != nil {
+	if len(*results) == 0 {
+		res, err := c.abi.Unpack(method, output)
+		*results = res
 		return err
 	}
-	return c.abi.Unpack(result, method, output)
+	res := *results
+	return c.abi.UnpackIntoInterface(res[0], method, output)
 }
 
 // Transact invokes the (paid) contract method with params as input values.
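Illustrative note (not from the diff): call sites now pass a pointer to a slice that Call fills. contract and holder are assumed to exist; the conversion line mirrors the pattern abigen emits for generated bindings.

// Hedged sketch of the new low-level Call signature.
var out []interface{}
if err := contract.Call(&bind.CallOpts{}, &out, "balanceOf", holder); err != nil {
	return err
}
balance := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)
_ = balance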
@@ -171,73 +210,189 @@ func (c *BoundContract) Transact(opts *TransactOpts, method string, params ...in
 	if err != nil {
 		return nil, err
 	}
+	// todo(rjl493456442) check the method is payable or not,
+	// reject invalid transaction at the first place
 	return c.transact(opts, &c.address, input)
 }
 
+// RawTransact initiates a transaction with the given raw calldata as the input.
+// It's usually used to initiate transactions for invoking **Fallback** function.
+func (c *BoundContract) RawTransact(opts *TransactOpts, calldata []byte) (*types.Transaction, error) {
+	// todo(rjl493456442) check the method is payable or not,
+	// reject invalid transaction at the first place
+	return c.transact(opts, &c.address, calldata)
+}
+
 // Transfer initiates a plain transaction to move funds to the contract, calling
 // its default method if one is available.
 func (c *BoundContract) Transfer(opts *TransactOpts) (*types.Transaction, error) {
+	// todo(rjl493456442) check the payable fallback or receive is defined
+	// or not, reject invalid transaction at the first place
 	return c.transact(opts, &c.address, nil)
 }
 
+func (c *BoundContract) createDynamicTx(opts *TransactOpts, contract *common.Address, input []byte, head *types.Header) (*types.Transaction, error) {
+	// Normalize value
+	value := opts.Value
+	if value == nil {
+		value = new(big.Int)
+	}
+	// Estimate TipCap
+	gasTipCap := opts.GasTipCap
+	if gasTipCap == nil {
+		tip, err := c.transactor.SuggestGasTipCap(ensureContext(opts.Context))
+		if err != nil {
+			return nil, err
+		}
+		gasTipCap = tip
+	}
+	// Estimate FeeCap
+	gasFeeCap := opts.GasFeeCap
+	if gasFeeCap == nil {
+		gasFeeCap = new(big.Int).Add(
+			gasTipCap,
+			new(big.Int).Mul(head.BaseFee, big.NewInt(2)),
+		)
+	}
+	if gasFeeCap.Cmp(gasTipCap) < 0 {
+		return nil, fmt.Errorf("maxFeePerGas (%v) < maxPriorityFeePerGas (%v)", gasFeeCap, gasTipCap)
+	}
+	// Estimate GasLimit
+	gasLimit := opts.GasLimit
+	if opts.GasLimit == 0 {
+		var err error
+		gasLimit, err = c.estimateGasLimit(opts, contract, input, nil, gasTipCap, gasFeeCap, value)
+		if err != nil {
+			return nil, err
+		}
+	}
+	// create the transaction
+	nonce, err := c.getNonce(opts)
+	if err != nil {
+		return nil, err
+	}
+	baseTx := &types.DynamicFeeTx{
+		To:        contract,
+		Nonce:     nonce,
+		GasFeeCap: gasFeeCap,
+		GasTipCap: gasTipCap,
+		Gas:       gasLimit,
+		Value:     value,
+		Data:      input,
+	}
+	return types.NewTx(baseTx), nil
+}
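Worked example (illustrative, not part of the diff): with GasFeeCap left nil, the default cap is gasTipCap + 2*baseFee, leaving headroom for the base fee to double before the transaction stops being includable.

// Hedged sketch of the default fee-cap arithmetic above.
baseFee := big.NewInt(100_000_000_000) // assume a 100 gwei base fee in the pending header
tip := big.NewInt(2_000_000_000)       // assume a 2 gwei suggested tip
feeCap := new(big.Int).Add(tip, new(big.Int).Mul(baseFee, big.NewInt(2)))
fmt.Println(feeCap) // 202000000000 wei, i.e. 202 gwei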
+
+func (c *BoundContract) createLegacyTx(opts *TransactOpts, contract *common.Address, input []byte) (*types.Transaction, error) {
+	if opts.GasFeeCap != nil || opts.GasTipCap != nil {
+		return nil, errors.New("maxFeePerGas or maxPriorityFeePerGas specified but london is not active yet")
+	}
+	// Normalize value
+	value := opts.Value
+	if value == nil {
+		value = new(big.Int)
+	}
+	// Estimate GasPrice
+	gasPrice := opts.GasPrice
+	if gasPrice == nil {
+		price, err := c.transactor.SuggestGasPrice(ensureContext(opts.Context))
+		if err != nil {
+			return nil, err
+		}
+		gasPrice = price
+	}
+	// Estimate GasLimit
+	gasLimit := opts.GasLimit
+	if opts.GasLimit == 0 {
+		var err error
+		gasLimit, err = c.estimateGasLimit(opts, contract, input, gasPrice, nil, nil, value)
+		if err != nil {
+			return nil, err
+		}
+	}
+	// create the transaction
+	nonce, err := c.getNonce(opts)
+	if err != nil {
+		return nil, err
+	}
+	baseTx := &types.LegacyTx{
+		To:       contract,
+		Nonce:    nonce,
+		GasPrice: gasPrice,
+		Gas:      gasLimit,
+		Value:    value,
+		Data:     input,
+	}
+	return types.NewTx(baseTx), nil
+}
+
+func (c *BoundContract) estimateGasLimit(opts *TransactOpts, contract *common.Address, input []byte, gasPrice, gasTipCap, gasFeeCap, value *big.Int) (uint64, error) {
+	if contract != nil {
+		// Gas estimation cannot succeed without code for method invocations.
+		if code, err := c.transactor.PendingCodeAt(ensureContext(opts.Context), c.address); err != nil {
+			return 0, err
+		} else if len(code) == 0 {
+			return 0, ErrNoCode
+		}
+	}
+	msg := ethereum.CallMsg{
+		From:      opts.From,
+		To:        contract,
+		GasPrice:  gasPrice,
+		GasTipCap: gasTipCap,
+		GasFeeCap: gasFeeCap,
+		Value:     value,
+		Data:      input,
+	}
+	return c.transactor.EstimateGas(ensureContext(opts.Context), msg)
+}
+
+func (c *BoundContract) getNonce(opts *TransactOpts) (uint64, error) {
+	if opts.Nonce == nil {
+		return c.transactor.PendingNonceAt(ensureContext(opts.Context), opts.From)
+	} else {
+		return opts.Nonce.Uint64(), nil
+	}
+}
 
 // transact executes an actual transaction invocation, first deriving any missing
 // authorization fields, and then scheduling the transaction for execution.
 func (c *BoundContract) transact(opts *TransactOpts, contract *common.Address, input []byte) (*types.Transaction, error) {
-	var err error
-	// Ensure a valid value field and resolve the account nonce
-	value := opts.Value
-	if value == nil {
-		value = new(big.Int)
+	if opts.GasPrice != nil && (opts.GasFeeCap != nil || opts.GasTipCap != nil) {
+		return nil, errors.New("both gasPrice and (maxFeePerGas or maxPriorityFeePerGas) specified")
 	}
-	var nonce uint64
-	if opts.Nonce == nil {
-		nonce, err = c.transactor.PendingNonceAt(ensureContext(opts.Context), opts.From)
-		if err != nil {
-			return nil, fmt.Errorf("failed to retrieve account nonce: %v", err)
-		}
+	// Create the transaction
+	var (
+		rawTx *types.Transaction
+		err   error
+	)
+	if opts.GasPrice != nil {
+		rawTx, err = c.createLegacyTx(opts, contract, input)
 	} else {
-		nonce = opts.Nonce.Uint64()
-	}
-	// Figure out the gas allowance and gas price values
-	gasPrice := opts.GasPrice
-	if gasPrice == nil {
-		gasPrice, err = c.transactor.SuggestGasPrice(ensureContext(opts.Context))
-		if err != nil {
-			return nil, fmt.Errorf("failed to suggest gas price: %v", err)
+		// Only query for basefee if gasPrice not specified
+		if head, errHead := c.transactor.HeaderByNumber(ensureContext(opts.Context), nil); errHead != nil {
+			return nil, errHead
+		} else if head.BaseFee != nil {
+			rawTx, err = c.createDynamicTx(opts, contract, input, head)
+		} else {
+			// Chain is not London ready -> use legacy transaction
+			rawTx, err = c.createLegacyTx(opts, contract, input)
 		}
 	}
-	gasLimit := opts.GasLimit
-	if gasLimit == 0 {
-		// Gas estimation cannot succeed without code for method invocations
-		if contract != nil {
-			if code, err := c.transactor.PendingCodeAt(ensureContext(opts.Context), c.address); err != nil {
-				return nil, err
-			} else if len(code) == 0 {
-				return nil, ErrNoCode
-			}
-		}
-		// If the contract surely has code (or code is not needed), estimate the transaction
-		msg := ethereum.CallMsg{From: opts.From, To: contract, GasPrice: gasPrice, Value: value, Data: input}
-		gasLimit, err = c.transactor.EstimateGas(ensureContext(opts.Context), msg)
-		if err != nil {
-			return nil, fmt.Errorf("failed to estimate gas needed: %v", err)
-		}
-	}
-	// Create the transaction, sign it and schedule it for execution
-	var rawTx *types.Transaction
-	if contract == nil {
-		rawTx = types.NewContractCreation(nonce, value, gasLimit, gasPrice, input)
-	} else {
-		rawTx = types.NewTransaction(nonce, c.address, value, gasLimit, gasPrice, input)
+	if err != nil {
+		return nil, err
 	}
+	// Sign the transaction and schedule it for execution
 	if opts.Signer == nil {
 		return nil, errors.New("no signer to authorize the transaction with")
 	}
-	signedTx, err := opts.Signer(types.HomesteadSigner{}, opts.From, rawTx)
+	signedTx, err := opts.Signer(opts.From, rawTx)
 	if err != nil {
 		return nil, err
 	}
+	if opts.NoSend {
+		return signedTx, nil
+	}
 	if err := c.transactor.SendTransaction(ensureContext(opts.Context), signedTx); err != nil {
 		return nil, err
 	}
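Illustrative note (not from the diff): with NoSend the signed transaction is returned without being broadcast, so it can be inspected or submitted later. contract, opts, client and ctx are assumed to exist.

// Hedged sketch: defer broadcasting of a bound-contract transaction.
opts.NoSend = true
tx, err := contract.Transfer(opts) // built, gas-priced and signed by transact, but not sent
if err != nil {
	return err
}
return client.SendTransaction(ctx, tx) // submit through any ethclient-style backend when ready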
@@ -252,9 +407,9 @@ func (c *BoundContract) FilterLogs(opts *FilterOpts, name string, query ...[]int
 		opts = new(FilterOpts)
 	}
 	// Append the event selector to the query parameters and construct the topic set
-	query = append([][]interface{}{{c.abi.Events[name].ID()}}, query...)
+	query = append([][]interface{}{{c.abi.Events[name].ID}}, query...)
 
-	topics, err := makeTopics(query...)
+	topics, err := abi.MakeTopics(query...)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -301,9 +456,9 @@ func (c *BoundContract) WatchLogs(opts *WatchOpts, name string, query ...[]inter
 		opts = new(WatchOpts)
 	}
 	// Append the event selector to the query parameters and construct the topic set
-	query = append([][]interface{}{{c.abi.Events[name].ID()}}, query...)
+	query = append([][]interface{}{{c.abi.Events[name].ID}}, query...)
 
-	topics, err := makeTopics(query...)
+	topics, err := abi.MakeTopics(query...)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -326,8 +481,11 @@ func (c *BoundContract) WatchLogs(opts *WatchOpts, name string, query ...[]inter
 
 // UnpackLog unpacks a retrieved log into the provided output structure.
 func (c *BoundContract) UnpackLog(out interface{}, event string, log types.Log) error {
+	if log.Topics[0] != c.abi.Events[event].ID {
+		return fmt.Errorf("event signature mismatch")
+	}
 	if len(log.Data) > 0 {
-		if err := c.abi.Unpack(out, event, log.Data); err != nil {
+		if err := c.abi.UnpackIntoInterface(out, event, log.Data); err != nil {
 			return err
 		}
 	}
@@ -337,11 +495,14 @@ func (c *BoundContract) UnpackLog(out interface{}, event string, log types.Log)
 			indexed = append(indexed, arg)
 		}
 	}
-	return parseTopics(out, indexed, log.Topics[1:])
+	return abi.ParseTopics(out, indexed, log.Topics[1:])
 }
 
 // UnpackLogIntoMap unpacks a retrieved log into the provided map.
 func (c *BoundContract) UnpackLogIntoMap(out map[string]interface{}, event string, log types.Log) error {
+	if log.Topics[0] != c.abi.Events[event].ID {
+		return fmt.Errorf("event signature mismatch")
+	}
 	if len(log.Data) > 0 {
 		if err := c.abi.UnpackIntoMap(out, event, log.Data); err != nil {
 			return err
@@ -353,14 +514,14 @@ func (c *BoundContract) UnpackLogIntoMap(out map[string]interface{}, event strin
 			indexed = append(indexed, arg)
 		}
 	}
-	return parseTopicsIntoMap(out, indexed, log.Topics[1:])
+	return abi.ParseTopicsIntoMap(out, indexed, log.Topics[1:])
 }
 
 // ensureContext is a helper method to ensure a context is not nil, even if the
 // user specified it as such.
 func ensureContext(ctx context.Context) context.Context {
 	if ctx == nil {
-		return context.TODO()
+		return context.Background()
 	}
 	return ctx
 }
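Illustrative note (not from the diff): the unpackers now verify the first topic against the event ID before decoding. bound and vLog are assumed to exist.

// Hedged sketch: decode a fetched log into a generic map.
fields := make(map[string]interface{})
if err := bound.UnpackLogIntoMap(fields, "Transfer", vLog); err != nil {
	return err // includes the new "event signature mismatch" error
}
fmt.Println(fields["from"], fields["to"], fields["value"]) // indexed topics and data share one map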
@@ -17,9 +17,9 @@
 package bind_test
 
 import (
-	"bytes"
 	"context"
 	"math/big"
+	"reflect"
 	"strings"
 	"testing"
 
@@ -31,11 +31,54 @@ import (
 	"github.com/ethereum/go-ethereum/core/types"
 	"github.com/ethereum/go-ethereum/crypto"
 	"github.com/ethereum/go-ethereum/rlp"
+	"github.com/stretchr/testify/assert"
 )
 
+func mockSign(addr common.Address, tx *types.Transaction) (*types.Transaction, error) { return tx, nil }
+
+type mockTransactor struct {
+	baseFee                *big.Int
+	gasTipCap              *big.Int
+	gasPrice               *big.Int
+	suggestGasTipCapCalled bool
+	suggestGasPriceCalled  bool
+}
+
+func (mt *mockTransactor) HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error) {
+	return &types.Header{BaseFee: mt.baseFee}, nil
+}
+
+func (mt *mockTransactor) PendingCodeAt(ctx context.Context, account common.Address) ([]byte, error) {
+	return []byte{1}, nil
+}
+
+func (mt *mockTransactor) PendingNonceAt(ctx context.Context, account common.Address) (uint64, error) {
+	return 0, nil
+}
+
+func (mt *mockTransactor) SuggestGasPrice(ctx context.Context) (*big.Int, error) {
+	mt.suggestGasPriceCalled = true
+	return mt.gasPrice, nil
+}
+
+func (mt *mockTransactor) SuggestGasTipCap(ctx context.Context) (*big.Int, error) {
+	mt.suggestGasTipCapCalled = true
+	return mt.gasTipCap, nil
+}
+
+func (mt *mockTransactor) EstimateGas(ctx context.Context, call ethereum.CallMsg) (gas uint64, err error) {
+	return 0, nil
+}
+
+func (mt *mockTransactor) SendTransaction(ctx context.Context, tx *types.Transaction) error {
+	return nil
+}
+
 type mockCaller struct {
 	codeAtBlockNumber         *big.Int
 	callContractBlockNumber   *big.Int
+	pendingCodeAtCalled       bool
+	pendingCallContractCalled bool
 }
 
 func (mc *mockCaller) CodeAt(ctx context.Context, contract common.Address, blockNumber *big.Int) ([]byte, error) {
@@ -47,6 +90,16 @@ func (mc *mockCaller) CallContract(ctx context.Context, call ethereum.CallMsg, b
 	mc.callContractBlockNumber = blockNumber
 	return nil, nil
 }
+
+func (mc *mockCaller) PendingCodeAt(ctx context.Context, contract common.Address) ([]byte, error) {
+	mc.pendingCodeAtCalled = true
+	return nil, nil
+}
+
+func (mc *mockCaller) PendingCallContract(ctx context.Context, call ethereum.CallMsg) ([]byte, error) {
+	mc.pendingCallContractCalled = true
+	return nil, nil
+}
 func TestPassingBlockNumber(t *testing.T) {
 
 	mc := &mockCaller{}
@@ -59,11 +112,10 @@ func TestPassingBlockNumber(t *testing.T) {
 		},
 	}, mc, nil, nil)
-	var ret string
 
 	blockNumber := big.NewInt(42)
 
-	bc.Call(&bind.CallOpts{BlockNumber: blockNumber}, &ret, "something")
+	bc.Call(&bind.CallOpts{BlockNumber: blockNumber}, nil, "something")
 
 	if mc.callContractBlockNumber != blockNumber {
 		t.Fatalf("CallContract() was not passed the block number")
@@ -73,7 +125,7 @@ func TestPassingBlockNumber(t *testing.T) {
 		t.Fatalf("CodeAt() was not passed the block number")
 	}
 
-	bc.Call(&bind.CallOpts{}, &ret, "something")
+	bc.Call(&bind.CallOpts{}, nil, "something")
 
 	if mc.callContractBlockNumber != nil {
 		t.Fatalf("CallContract() was passed a block number when it should not have been")
@@ -82,57 +134,39 @@ func TestPassingBlockNumber(t *testing.T) {
 	if mc.codeAtBlockNumber != nil {
 		t.Fatalf("CodeAt() was passed a block number when it should not have been")
 	}
+
+	bc.Call(&bind.CallOpts{BlockNumber: blockNumber, Pending: true}, nil, "something")
+
+	if !mc.pendingCallContractCalled {
+		t.Fatalf("CallContract() was not passed the block number")
+	}
+
+	if !mc.pendingCodeAtCalled {
+		t.Fatalf("CodeAt() was not passed the block number")
+	}
 }
 
 const hexData = "0x000000000000000000000000376c47978271565f56deb45495afa69e59c16ab200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000158"
 
 func TestUnpackIndexedStringTyLogIntoMap(t *testing.T) {
 	hash := crypto.Keccak256Hash([]byte("testName"))
-	mockLog := types.Log{
-		Address: common.HexToAddress("0x0"),
-		Topics: []common.Hash{
-			common.HexToHash("0x0"),
-			hash,
-		},
-		Data:        hexutil.MustDecode(hexData),
-		BlockNumber: uint64(26),
-		TxHash:      common.HexToHash("0x0"),
-		TxIndex:     111,
-		BlockHash:   common.BytesToHash([]byte{1, 2, 3, 4, 5}),
-		Index:       7,
-		Removed:     false,
+	topics := []common.Hash{
+		crypto.Keccak256Hash([]byte("received(string,address,uint256,bytes)")),
+		hash,
 	}
+	mockLog := newMockLog(topics, common.HexToHash("0x0"))
 
 	abiString := `[{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":false,"name":"sender","type":"address"},{"indexed":false,"name":"amount","type":"uint256"},{"indexed":false,"name":"memo","type":"bytes"}],"name":"received","type":"event"}]`
 	parsedAbi, _ := abi.JSON(strings.NewReader(abiString))
 	bc := bind.NewBoundContract(common.HexToAddress("0x0"), parsedAbi, nil, nil, nil)
 
-	receivedMap := make(map[string]interface{})
 	expectedReceivedMap := map[string]interface{}{
 		"name":   hash,
 		"sender": common.HexToAddress("0x376c47978271565f56DEB45495afa69E59c16Ab2"),
 		"amount": big.NewInt(1),
 		"memo":   []byte{88},
 	}
-	if err := bc.UnpackLogIntoMap(receivedMap, "received", mockLog); err != nil {
-		t.Error(err)
-	}
-
-	if len(receivedMap) != 4 {
-		t.Fatal("unpacked map expected to have length 4")
-	}
-	if receivedMap["name"] != expectedReceivedMap["name"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["sender"] != expectedReceivedMap["sender"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["amount"].(*big.Int).Cmp(expectedReceivedMap["amount"].(*big.Int)) != 0 {
-		t.Error("unpacked map does not match expected map")
-	}
-	if !bytes.Equal(receivedMap["memo"].([]byte), expectedReceivedMap["memo"].([]byte)) {
-		t.Error("unpacked map does not match expected map")
-	}
+	unpackAndCheck(t, bc, expectedReceivedMap, mockLog)
 }
 
 func TestUnpackIndexedSliceTyLogIntoMap(t *testing.T) {
@@ -141,51 +175,23 @@ func TestUnpackIndexedSliceTyLogIntoMap(t *testing.T) {
 		t.Fatal(err)
 	}
 	hash := crypto.Keccak256Hash(sliceBytes)
-	mockLog := types.Log{
-		Address: common.HexToAddress("0x0"),
-		Topics: []common.Hash{
-			common.HexToHash("0x0"),
-			hash,
-		},
-		Data:        hexutil.MustDecode(hexData),
-		BlockNumber: uint64(26),
-		TxHash:      common.HexToHash("0x0"),
-		TxIndex:     111,
-		BlockHash:   common.BytesToHash([]byte{1, 2, 3, 4, 5}),
-		Index:       7,
-		Removed:     false,
+	topics := []common.Hash{
+		crypto.Keccak256Hash([]byte("received(string[],address,uint256,bytes)")),
+		hash,
 	}
+	mockLog := newMockLog(topics, common.HexToHash("0x0"))
 
 	abiString := `[{"anonymous":false,"inputs":[{"indexed":true,"name":"names","type":"string[]"},{"indexed":false,"name":"sender","type":"address"},{"indexed":false,"name":"amount","type":"uint256"},{"indexed":false,"name":"memo","type":"bytes"}],"name":"received","type":"event"}]`
 	parsedAbi, _ := abi.JSON(strings.NewReader(abiString))
 	bc := bind.NewBoundContract(common.HexToAddress("0x0"), parsedAbi, nil, nil, nil)
 
-	receivedMap := make(map[string]interface{})
 	expectedReceivedMap := map[string]interface{}{
 		"names":  hash,
 		"sender": common.HexToAddress("0x376c47978271565f56DEB45495afa69E59c16Ab2"),
 		"amount": big.NewInt(1),
 		"memo":   []byte{88},
 	}
-	if err := bc.UnpackLogIntoMap(receivedMap, "received", mockLog); err != nil {
-		t.Error(err)
-	}
-
-	if len(receivedMap) != 4 {
-		t.Fatal("unpacked map expected to have length 4")
-	}
-	if receivedMap["names"] != expectedReceivedMap["names"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["sender"] != expectedReceivedMap["sender"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["amount"].(*big.Int).Cmp(expectedReceivedMap["amount"].(*big.Int)) != 0 {
-		t.Error("unpacked map does not match expected map")
-	}
-	if !bytes.Equal(receivedMap["memo"].([]byte), expectedReceivedMap["memo"].([]byte)) {
-		t.Error("unpacked map does not match expected map")
-	}
+	unpackAndCheck(t, bc, expectedReceivedMap, mockLog)
 }
 
 func TestUnpackIndexedArrayTyLogIntoMap(t *testing.T) {
@@ -194,51 +200,23 @@ func TestUnpackIndexedArrayTyLogIntoMap(t *testing.T) {
 		t.Fatal(err)
 	}
 	hash := crypto.Keccak256Hash(arrBytes)
-	mockLog := types.Log{
-		Address: common.HexToAddress("0x0"),
-		Topics: []common.Hash{
-			common.HexToHash("0x0"),
-			hash,
-		},
-		Data:        hexutil.MustDecode(hexData),
-		BlockNumber: uint64(26),
-		TxHash:      common.HexToHash("0x0"),
-		TxIndex:     111,
-		BlockHash:   common.BytesToHash([]byte{1, 2, 3, 4, 5}),
-		Index:       7,
-		Removed:     false,
+	topics := []common.Hash{
+		crypto.Keccak256Hash([]byte("received(address[2],address,uint256,bytes)")),
+		hash,
 	}
+	mockLog := newMockLog(topics, common.HexToHash("0x0"))
 
 	abiString := `[{"anonymous":false,"inputs":[{"indexed":true,"name":"addresses","type":"address[2]"},{"indexed":false,"name":"sender","type":"address"},{"indexed":false,"name":"amount","type":"uint256"},{"indexed":false,"name":"memo","type":"bytes"}],"name":"received","type":"event"}]`
 	parsedAbi, _ := abi.JSON(strings.NewReader(abiString))
 	bc := bind.NewBoundContract(common.HexToAddress("0x0"), parsedAbi, nil, nil, nil)
 
-	receivedMap := make(map[string]interface{})
 	expectedReceivedMap := map[string]interface{}{
 		"addresses": hash,
 		"sender":    common.HexToAddress("0x376c47978271565f56DEB45495afa69E59c16Ab2"),
 		"amount":    big.NewInt(1),
 		"memo":      []byte{88},
 	}
-	if err := bc.UnpackLogIntoMap(receivedMap, "received", mockLog); err != nil {
-		t.Error(err)
-	}
-
-	if len(receivedMap) != 4 {
-		t.Fatal("unpacked map expected to have length 4")
-	}
-	if receivedMap["addresses"] != expectedReceivedMap["addresses"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["sender"] != expectedReceivedMap["sender"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["amount"].(*big.Int).Cmp(expectedReceivedMap["amount"].(*big.Int)) != 0 {
-		t.Error("unpacked map does not match expected map")
-	}
-	if !bytes.Equal(receivedMap["memo"].([]byte), expectedReceivedMap["memo"].([]byte)) {
-		t.Error("unpacked map does not match expected map")
-	}
+	unpackAndCheck(t, bc, expectedReceivedMap, mockLog)
 }
 
 func TestUnpackIndexedFuncTyLogIntoMap(t *testing.T) {
@@ -249,99 +227,117 @@ func TestUnpackIndexedFuncTyLogIntoMap(t *testing.T) {
 	functionTyBytes := append(addrBytes, functionSelector...)
 	var functionTy [24]byte
 	copy(functionTy[:], functionTyBytes[0:24])
-	mockLog := types.Log{
-		Address: common.HexToAddress("0x0"),
-		Topics: []common.Hash{
-			common.HexToHash("0x99b5620489b6ef926d4518936cfec15d305452712b88bd59da2d9c10fb0953e8"),
-			common.BytesToHash(functionTyBytes),
-		},
-		Data:        hexutil.MustDecode(hexData),
-		BlockNumber: uint64(26),
-		TxHash:      common.HexToHash("0x5c698f13940a2153440c6d19660878bc90219d9298fdcf37365aa8d88d40fc42"),
-		TxIndex:     111,
-		BlockHash:   common.BytesToHash([]byte{1, 2, 3, 4, 5}),
-		Index:       7,
-		Removed:     false,
+	topics := []common.Hash{
+		crypto.Keccak256Hash([]byte("received(function,address,uint256,bytes)")),
+		common.BytesToHash(functionTyBytes),
 	}
+	mockLog := newMockLog(topics, common.HexToHash("0x5c698f13940a2153440c6d19660878bc90219d9298fdcf37365aa8d88d40fc42"))
 	abiString := `[{"anonymous":false,"inputs":[{"indexed":true,"name":"function","type":"function"},{"indexed":false,"name":"sender","type":"address"},{"indexed":false,"name":"amount","type":"uint256"},{"indexed":false,"name":"memo","type":"bytes"}],"name":"received","type":"event"}]`
 	parsedAbi, _ := abi.JSON(strings.NewReader(abiString))
 	bc := bind.NewBoundContract(common.HexToAddress("0x0"), parsedAbi, nil, nil, nil)
 
-	receivedMap := make(map[string]interface{})
 	expectedReceivedMap := map[string]interface{}{
 		"function": functionTy,
 		"sender":   common.HexToAddress("0x376c47978271565f56DEB45495afa69E59c16Ab2"),
 		"amount":   big.NewInt(1),
 		"memo":     []byte{88},
 	}
-	if err := bc.UnpackLogIntoMap(receivedMap, "received", mockLog); err != nil {
-		t.Error(err)
-	}
-
-	if len(receivedMap) != 4 {
-		t.Fatal("unpacked map expected to have length 4")
-	}
-	if receivedMap["function"] != expectedReceivedMap["function"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["sender"] != expectedReceivedMap["sender"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["amount"].(*big.Int).Cmp(expectedReceivedMap["amount"].(*big.Int)) != 0 {
-		t.Error("unpacked map does not match expected map")
-	}
-	if !bytes.Equal(receivedMap["memo"].([]byte), expectedReceivedMap["memo"].([]byte)) {
-		t.Error("unpacked map does not match expected map")
-	}
+	unpackAndCheck(t, bc, expectedReceivedMap, mockLog)
 }
 
 func TestUnpackIndexedBytesTyLogIntoMap(t *testing.T) {
-	byts := []byte{1, 2, 3, 4, 5}
-	hash := crypto.Keccak256Hash(byts)
-	mockLog := types.Log{
-		Address: common.HexToAddress("0x0"),
-		Topics: []common.Hash{
-			common.HexToHash("0x99b5620489b6ef926d4518936cfec15d305452712b88bd59da2d9c10fb0953e8"),
-			hash,
-		},
-		Data:        hexutil.MustDecode(hexData),
-		BlockNumber: uint64(26),
-		TxHash:      common.HexToHash("0x5c698f13940a2153440c6d19660878bc90219d9298fdcf37365aa8d88d40fc42"),
-		TxIndex:     111,
-		BlockHash:   common.BytesToHash([]byte{1, 2, 3, 4, 5}),
-		Index:       7,
-		Removed:     false,
+	bytes := []byte{1, 2, 3, 4, 5}
+	hash := crypto.Keccak256Hash(bytes)
+	topics := []common.Hash{
+		crypto.Keccak256Hash([]byte("received(bytes,address,uint256,bytes)")),
+		hash,
 	}
+	mockLog := newMockLog(topics, common.HexToHash("0x5c698f13940a2153440c6d19660878bc90219d9298fdcf37365aa8d88d40fc42"))
 
 	abiString := `[{"anonymous":false,"inputs":[{"indexed":true,"name":"content","type":"bytes"},{"indexed":false,"name":"sender","type":"address"},{"indexed":false,"name":"amount","type":"uint256"},{"indexed":false,"name":"memo","type":"bytes"}],"name":"received","type":"event"}]`
 	parsedAbi, _ := abi.JSON(strings.NewReader(abiString))
 	bc := bind.NewBoundContract(common.HexToAddress("0x0"), parsedAbi, nil, nil, nil)
 
-	receivedMap := make(map[string]interface{})
 	expectedReceivedMap := map[string]interface{}{
 		"content": hash,
 		"sender":  common.HexToAddress("0x376c47978271565f56DEB45495afa69E59c16Ab2"),
 		"amount":  big.NewInt(1),
 		"memo":    []byte{88},
 	}
-	if err := bc.UnpackLogIntoMap(receivedMap, "received", mockLog); err != nil {
-		t.Error(err)
-	}
-
-	if len(receivedMap) != 4 {
-		t.Fatal("unpacked map expected to have length 4")
-	}
-	if receivedMap["content"] != expectedReceivedMap["content"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["sender"] != expectedReceivedMap["sender"] {
-		t.Error("unpacked map does not match expected map")
-	}
-	if receivedMap["amount"].(*big.Int).Cmp(expectedReceivedMap["amount"].(*big.Int)) != 0 {
-		t.Error("unpacked map does not match expected map")
-	}
-	if !bytes.Equal(receivedMap["memo"].([]byte), expectedReceivedMap["memo"].([]byte)) {
-		t.Error("unpacked map does not match expected map")
-	}
+	unpackAndCheck(t, bc, expectedReceivedMap, mockLog)
 }
+
+func TestTransactGasFee(t *testing.T) {
+	assert := assert.New(t)
+
+	// GasTipCap and GasFeeCap
+	// When opts.GasTipCap and opts.GasFeeCap are nil
+	mt := &mockTransactor{baseFee: big.NewInt(100), gasTipCap: big.NewInt(5)}
+	bc := bind.NewBoundContract(common.Address{}, abi.ABI{}, nil, mt, nil)
+	opts := &bind.TransactOpts{Signer: mockSign}
+	tx, err := bc.Transact(opts, "")
+	assert.Nil(err)
+	assert.Equal(big.NewInt(5), tx.GasTipCap())
+	assert.Equal(big.NewInt(205), tx.GasFeeCap())
+	assert.Nil(opts.GasTipCap)
+	assert.Nil(opts.GasFeeCap)
+	assert.True(mt.suggestGasTipCapCalled)
+
+	// Second call to Transact should use latest suggested GasTipCap
+	mt.gasTipCap = big.NewInt(6)
+	mt.suggestGasTipCapCalled = false
+	tx, err = bc.Transact(opts, "")
+	assert.Nil(err)
+	assert.Equal(big.NewInt(6), tx.GasTipCap())
+	assert.Equal(big.NewInt(206), tx.GasFeeCap())
+	assert.True(mt.suggestGasTipCapCalled)
+
+	// GasPrice
+	// When opts.GasPrice is nil
+	mt = &mockTransactor{gasPrice: big.NewInt(5)}
+	bc = bind.NewBoundContract(common.Address{}, abi.ABI{}, nil, mt, nil)
+	opts = &bind.TransactOpts{Signer: mockSign}
+	tx, err = bc.Transact(opts, "")
+	assert.Nil(err)
+	assert.Equal(big.NewInt(5), tx.GasPrice())
+	assert.Nil(opts.GasPrice)
+	assert.True(mt.suggestGasPriceCalled)
+
+	// Second call to Transact should use latest suggested GasPrice
+	mt.gasPrice = big.NewInt(6)
+	mt.suggestGasPriceCalled = false
+	tx, err = bc.Transact(opts, "")
+	assert.Nil(err)
+	assert.Equal(big.NewInt(6), tx.GasPrice())
+	assert.True(mt.suggestGasPriceCalled)
+}
+
+func unpackAndCheck(t *testing.T, bc *bind.BoundContract, expected map[string]interface{}, mockLog types.Log) {
+	received := make(map[string]interface{})
+	if err := bc.UnpackLogIntoMap(received, "received", mockLog); err != nil {
+		t.Error(err)
+	}
+
+	if len(received) != len(expected) {
+		t.Fatalf("unpacked map length %v not equal expected length of %v", len(received), len(expected))
+	}
+	for name, elem := range expected {
+		if !reflect.DeepEqual(elem, received[name]) {
+			t.Errorf("field %v does not match expected, want %v, got %v", name, elem, received[name])
+		}
+	}
+}
+
+func newMockLog(topics []common.Hash, txHash common.Hash) types.Log {
+	return types.Log{
+		Address:     common.HexToAddress("0x0"),
+		Topics:      topics,
+		Data:        hexutil.MustDecode(hexData),
+		BlockNumber: uint64(26),
+		TxHash:      txHash,
+		TxIndex:     111,
+		BlockHash:   common.BytesToHash([]byte{1, 2, 3, 4, 5}),
+		Index:       7,
+		Removed:     false,
+	}
+}
@@ -47,13 +47,17 @@ const (
 // to be used as is in client code, but rather as an intermediate struct which
 // enforces compile time type safety and naming convention opposed to having to
 // manually maintain hard coded strings that break on runtime.
-func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]string, pkg string, lang Lang, libs map[string]string) (string, error) {
-	// Process each individual contract requested binding
-	contracts := make(map[string]*tmplContract)
+func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]string, pkg string, lang Lang, libs map[string]string, aliases map[string]string) (string, error) {
+	var (
+		// contracts is the map of each individual contract requested binding
+		contracts = make(map[string]*tmplContract)
 
-	// Map used to flag each encountered library as such
-	isLib := make(map[string]struct{})
+		// structs is the map of all redeclared structs shared by passed contracts.
+		structs = make(map[string]*tmplStruct)
+
+		// isLib is the map used to flag each encountered library as such
+		isLib = make(map[string]struct{})
+	)
 	for i := 0; i < len(types); i++ {
 		// Parse the actual ABI to generate the binding for
 		evmABI, err := abi.JSON(strings.NewReader(abis[i]))
@@ -73,13 +77,31 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
 			calls     = make(map[string]*tmplMethod)
 			transacts = make(map[string]*tmplMethod)
 			events    = make(map[string]*tmplEvent)
-			structs   = make(map[string]*tmplStruct)
+			fallback  *tmplMethod
+			receive   *tmplMethod
+
+			// identifiers are used to detect duplicated identifiers of functions
+			// and events. For all calls, transacts and events, abigen will generate
+			// corresponding bindings. However we have to ensure there is no
+			// identifier collisions in the bindings of these categories.
+			callIdentifiers     = make(map[string]bool)
+			transactIdentifiers = make(map[string]bool)
+			eventIdentifiers    = make(map[string]bool)
 		)
 		for _, original := range evmABI.Methods {
 			// Normalize the method for capital cases and non-anonymous inputs/outputs
 			normalized := original
-			normalized.Name = methodNormalizer[lang](original.Name)
+			normalizedName := methodNormalizer[lang](alias(aliases, original.Name))
+			// Ensure there is no duplicated identifier
+			var identifiers = callIdentifiers
+			if !original.IsConstant() {
+				identifiers = transactIdentifiers
+			}
+			if identifiers[normalizedName] {
+				return "", fmt.Errorf("duplicated identifier \"%s\"(normalized \"%s\"), use --alias for renaming", original.Name, normalizedName)
+			}
+			identifiers[normalizedName] = true
+			normalized.Name = normalizedName
 			normalized.Inputs = make([]abi.Argument, len(original.Inputs))
 			copy(normalized.Inputs, original.Inputs)
 			for j, input := range normalized.Inputs {
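Illustrative note (not from the diff): the new aliases argument, surfaced by abigen's --alias flag, renames identifiers that would otherwise collide after normalization. The overload name transfer0 and the tokenABI/tokenBin variables are assumptions for the sketch.

// Hedged sketch: rename a colliding identifier when calling Bind directly.
aliases := map[string]string{"transfer0": "TransferWithMemo"}
code, err := bind.Bind([]string{"Token"}, []string{tokenABI}, []string{tokenBin},
	nil, "token", bind.LangGo, nil, aliases)
if err != nil {
	log.Fatal(err) // e.g. duplicated identifier ..., use --alias for renaming
}
_ = code // generated Go source, usually written to token.go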
@@ -101,7 +123,7 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
			}
		}
		// Append the methods to the call or transact lists
-		if original.Const {
+		if original.IsConstant() {
			calls[original.Name] = &tmplMethod{Original: original, Normalized: normalized, Structured: structured(original.Outputs)}
		} else {
			transacts[original.Name] = &tmplMethod{Original: original, Normalized: normalized, Structured: structured(original.Outputs)}
@@ -114,7 +136,14 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
		}
		// Normalize the event for capital cases and non-anonymous outputs
		normalized := original
-		normalized.Name = methodNormalizer[lang](original.Name)
+		// Ensure there is no duplicated identifier
+		normalizedName := methodNormalizer[lang](alias(aliases, original.Name))
+		if eventIdentifiers[normalizedName] {
+			return "", fmt.Errorf("duplicated identifier \"%s\"(normalized \"%s\"), use --alias for renaming", original.Name, normalizedName)
+		}
+		eventIdentifiers[normalizedName] = true
+		normalized.Name = normalizedName
+
		normalized.Inputs = make([]abi.Argument, len(original.Inputs))
		copy(normalized.Inputs, original.Inputs)
@@ -129,7 +158,13 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
		// Append the event to the accumulator list
		events[original.Name] = &tmplEvent{Original: original, Normalized: normalized}
	}
+	// Add two special fallback functions if they exist
+	if evmABI.HasFallback() {
+		fallback = &tmplMethod{Original: evmABI.Fallback}
+	}
+	if evmABI.HasReceive() {
+		receive = &tmplMethod{Original: evmABI.Receive}
+	}
	// There is no easy way to pass arbitrary java objects to the Go side.
	if len(structs) > 0 && lang == LangJava {
		return "", errors.New("java binding for tuple arguments is not supported yet")
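For context, a small sketch of the fallback/receive detection the hunk above relies on. The ABI fragment and its contents are made up; abi.JSON, HasFallback and HasReceive are the calls used in the change.

package main

import (
	"fmt"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	// Hypothetical ABI declaring a payable fallback and a receive function.
	const def = `[
		{"type": "fallback", "stateMutability": "payable"},
		{"type": "receive", "stateMutability": "payable"}
	]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		panic(err)
	}
	// The binder only emits Fallback/Receive wrappers when these report true.
	fmt.Println("has fallback:", parsed.HasFallback())
	fmt.Println("has receive:", parsed.HasReceive())
}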
@@ -142,9 +177,10 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
			Constructor: evmABI.Constructor,
			Calls:       calls,
			Transacts:   transacts,
+			Fallback:    fallback,
+			Receive:     receive,
			Events:      events,
			Libraries:   make(map[string]string),
-			Structs:     structs,
		}
		// Function 4-byte signatures are stored in the same sequence
		// as types, if available.
@@ -176,6 +212,7 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
		Package:   pkg,
		Contracts: contracts,
		Libraries: libs,
+		Structs:   structs,
	}
	buffer := new(bytes.Buffer)

@@ -183,8 +220,6 @@ func Bind(types []string, abis []string, bytecodes []string, fsigs []map[string]
		"bindtype":      bindType[lang],
		"bindtopictype": bindTopicType[lang],
		"namedtype":     namedType[lang],
-		"formatmethod":  formatMethod,
-		"formatevent":   formatEvent,
		"capitalise":    capitalise,
		"decapitalise":  decapitalise,
	}
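The funcs map above is an ordinary text/template FuncMap. As a rough sketch of how such a map is wired into a template (names here are illustrative stand-ins, not the exact geth code), the helpers become callable from the template source:

package main

import (
	"os"
	"strings"
	"text/template"
)

func main() {
	// Illustrative helpers standing in for capitalise/decapitalise.
	funcs := template.FuncMap{
		"capitalise":   func(s string) string { return strings.ToUpper(s[:1]) + s[1:] },
		"decapitalise": func(s string) string { return strings.ToLower(s[:1]) + s[1:] },
	}
	// A tiny stand-in for tmplSourceGo: helpers are invoked as {{capitalise .}}.
	const src = "type {{capitalise .}} struct{} // binding for {{.}}\n"
	tmpl := template.Must(template.New("binding").Funcs(funcs).Parse(src))
	if err := tmpl.Execute(os.Stdout, "token"); err != nil {
		panic(err)
	}
}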
@@ -211,7 +246,7 @@ var bindType = map[Lang]func(kind abi.Type, structs map[string]*tmplStruct) stri
	LangJava: bindTypeJava,
}

-// bindBasicTypeGo converts basic solidity types(except array, slice and tuple) to Go one.
+// bindBasicTypeGo converts basic solidity types(except array, slice and tuple) to Go ones.
func bindBasicTypeGo(kind abi.Type) string {
	switch kind.T {
	case abi.AddressTy:
@@ -251,7 +286,7 @@ func bindTypeGo(kind abi.Type, structs map[string]*tmplStruct) string {
	}
}

-// bindBasicTypeJava converts basic solidity types(except array, slice and tuple) to Java one.
+// bindBasicTypeJava converts basic solidity types(except array, slice and tuple) to Java ones.
func bindBasicTypeJava(kind abi.Type) string {
	switch kind.T {
	case abi.AddressTy:
@@ -295,7 +330,7 @@ func bindBasicTypeJava(kind abi.Type) string {
}

// pluralizeJavaType explicitly converts multidimensional types to predefined
-// type in go side.
+// types in go side.
func pluralizeJavaType(typ string) string {
	switch typ {
	case "boolean":
@@ -334,7 +369,7 @@ var bindTopicType = map[Lang]func(kind abi.Type, structs map[string]*tmplStruct)
}

// bindTopicTypeGo converts a Solidity topic type to a Go one. It is almost the same
-// funcionality as for simple types, but dynamic types get converted to hashes.
+// functionality as for simple types, but dynamic types get converted to hashes.
func bindTopicTypeGo(kind abi.Type, structs map[string]*tmplStruct) string {
	bound := bindTypeGo(kind, structs)

@@ -351,7 +386,7 @@ func bindTopicTypeGo(kind abi.Type, structs map[string]*tmplStruct) string {
}

// bindTopicTypeJava converts a Solidity topic type to a Java one. It is almost the same
-// funcionality as for simple types, but dynamic types get converted to hashes.
+// functionality as for simple types, but dynamic types get converted to hashes.
func bindTopicTypeJava(kind abi.Type, structs map[string]*tmplStruct) string {
	bound := bindTypeJava(kind, structs)

@@ -359,7 +394,7 @@ func bindTopicTypeJava(kind abi.Type, structs map[string]*tmplStruct) string {
	// parameters that are not value types i.e. arrays and structs are not
	// stored directly but instead a keccak256-hash of an encoding is stored.
	//
-	// We only convert stringS and bytes to hash, still need to deal with
+	// We only convert strings and bytes to hash, still need to deal with
	// array(both fixed-size and dynamic-size) and struct.
	if bound == "String" || bound == "byte[]" {
		bound = "Hash"
@@ -380,7 +415,7 @@ var bindStructType = map[Lang]func(kind abi.Type, structs map[string]*tmplStruct
func bindStructTypeGo(kind abi.Type, structs map[string]*tmplStruct) string {
	switch kind.T {
	case abi.TupleTy:
-		// We compose raw struct name and canonical parameter expression
+		// We compose a raw struct name and a canonical parameter expression
		// together here. The reason is before solidity v0.5.11, kind.TupleRawName
		// is empty, so we use canonical parameter expression to distinguish
		// different struct definition. From the consideration of backward
@@ -419,7 +454,7 @@ func bindStructTypeGo(kind abi.Type, structs map[string]*tmplStruct) string {
func bindStructTypeJava(kind abi.Type, structs map[string]*tmplStruct) string {
	switch kind.T {
	case abi.TupleTy:
-		// We compose raw struct name and canonical parameter expression
+		// We compose a raw struct name and a canonical parameter expression
		// together here. The reason is before solidity v0.5.11, kind.TupleRawName
		// is empty, so we use canonical parameter expression to distinguish
		// different struct definition. From the consideration of backward
@@ -451,7 +486,7 @@ func bindStructTypeJava(kind abi.Type, structs map[string]*tmplStruct) string {
}

// namedType is a set of functions that transform language specific types to
-// named versions that my be used inside method names.
+// named versions that may be used inside method names.
var namedType = map[Lang]func(string, abi.Type) string{
	LangGo:   func(string, abi.Type) string { panic("this shouldn't be needed") },
	LangJava: namedTypeJava,
@@ -483,17 +518,24 @@ func namedTypeJava(javaKind string, solKind abi.Type) string {
	}
}

+// alias returns an alias of the given string based on the aliasing rules
+// or returns itself if no rule is matched.
+func alias(aliases map[string]string, n string) string {
+	if alias, exist := aliases[n]; exist {
+		return alias
+	}
+	return n
+}
+
// methodNormalizer is a name transformer that modifies Solidity method names to
-// conform to target language naming concentions.
+// conform to target language naming conventions.
var methodNormalizer = map[Lang]func(string) string{
	LangGo:   abi.ToCamelCase,
	LangJava: decapitalise,
}

// capitalise makes a camel-case string which starts with an upper case character.
-func capitalise(input string) string {
-	return abi.ToCamelCase(input)
-}
+var capitalise = abi.ToCamelCase

// decapitalise makes a camel-case string which starts with a lower case character.
func decapitalise(input string) string {
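A quick illustration of the two helpers touched in this hunk: capitalise is now simply abi.ToCamelCase, and alias is a plain map lookup. The sample names and the rename are invented; the sketch only uses the exported abi.ToCamelCase.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

// alias mirrors the helper added above: look up a rename, else keep the name.
func alias(aliases map[string]string, n string) string {
	if a, exist := aliases[n]; exist {
		return a
	}
	return n
}

func main() {
	capitalise := abi.ToCamelCase
	aliases := map[string]string{"total_supply": "totalSupplyLegacy"} // hypothetical rename
	for _, name := range []string{"total_supply", "balanceOf"} {
		fmt.Println(name, "->", capitalise(alias(aliases, name)))
	}
}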
@@ -542,63 +584,3 @@ func hasStruct(t abi.Type) bool {
		return false
	}
}
-
-// resolveArgName converts a raw argument representation into a user friendly format.
-func resolveArgName(arg abi.Argument, structs map[string]*tmplStruct) string {
-	var (
-		prefix   string
-		embedded string
-		typ      = &arg.Type
-	)
-loop:
-	for {
-		switch typ.T {
-		case abi.SliceTy:
-			prefix += "[]"
-		case abi.ArrayTy:
-			prefix += fmt.Sprintf("[%d]", typ.Size)
-		default:
-			embedded = typ.TupleRawName + typ.String()
-			break loop
-		}
-		typ = typ.Elem
-	}
-	if s, exist := structs[embedded]; exist {
-		return prefix + s.Name
-	} else {
-		return arg.Type.String()
-	}
-}
-
-// formatMethod transforms raw method representation into a user friendly one.
-func formatMethod(method abi.Method, structs map[string]*tmplStruct) string {
-	inputs := make([]string, len(method.Inputs))
-	for i, input := range method.Inputs {
-		inputs[i] = fmt.Sprintf("%v %v", resolveArgName(input, structs), input.Name)
-	}
-	outputs := make([]string, len(method.Outputs))
-	for i, output := range method.Outputs {
-		outputs[i] = resolveArgName(output, structs)
-		if len(output.Name) > 0 {
-			outputs[i] += fmt.Sprintf(" %v", output.Name)
-		}
-	}
-	constant := ""
-	if method.Const {
-		constant = "constant "
-	}
-	return fmt.Sprintf("function %v(%v) %sreturns(%v)", method.RawName, strings.Join(inputs, ", "), constant, strings.Join(outputs, ", "))
-}
-
-// formatEvent transforms raw event representation into a user friendly one.
-func formatEvent(event abi.Event, structs map[string]*tmplStruct) string {
-	inputs := make([]string, len(event.Inputs))
-	for i, input := range event.Inputs {
-		if input.Indexed {
-			inputs[i] = fmt.Sprintf("%v indexed %v", resolveArgName(input, structs), input.Name)
-		} else {
-			inputs[i] = fmt.Sprintf("%v %v", resolveArgName(input, structs), input.Name)
-		}
-	}
-	return fmt.Sprintf("event %v(%v)", event.RawName, strings.Join(inputs, ", "))
-}
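These helpers can be dropped because the templates further down now render signatures with the abi package's own stringers (the {{.Original.String}} substitutions). A hedged sketch of what that looks like from user code; the ABI fragment is invented for illustration.

package main

import (
	"fmt"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	// Hypothetical ERC20-ish fragment, just to have something to print.
	const def = `[
		{"type":"function","name":"transfer","inputs":[{"name":"to","type":"address"},{"name":"amount","type":"uint256"}],"outputs":[{"name":"","type":"bool"}]},
		{"type":"event","name":"Transfer","inputs":[{"indexed":true,"name":"from","type":"address"},{"indexed":true,"name":"to","type":"address"},{"indexed":false,"name":"value","type":"uint256"}]}
	]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		panic(err)
	}
	// Method and Event implement String(), which the templates now rely on
	// instead of the removed formatMethod/formatEvent helpers.
	fmt.Println(parsed.Methods["transfer"].String())
	fmt.Println(parsed.Events["Transfer"].String())
}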
File diff suppressed because one or more lines are too long
@@ -23,21 +23,23 @@ type tmplData struct {
	Package   string                   // Name of the package to place the generated file in
	Contracts map[string]*tmplContract // List of contracts to generate into this file
	Libraries map[string]string        // Map the bytecode's link pattern to the library name
+	Structs   map[string]*tmplStruct   // Contract struct type definitions
}

// tmplContract contains the data needed to generate an individual contract binding.
type tmplContract struct {
	Type        string                 // Type name of the main contract binding
	InputABI    string                 // JSON ABI used as the input to generate the binding from
-	InputBin    string                 // Optional EVM bytecode used to denetare deploy code from
+	InputBin    string                 // Optional EVM bytecode used to generate deploy code from
	FuncSigs    map[string]string      // Optional map: string signature -> 4-byte signature
	Constructor abi.Method             // Contract constructor for deploy parametrization
	Calls       map[string]*tmplMethod // Contract calls that only read state data
	Transacts   map[string]*tmplMethod // Contract calls that write state data
+	Fallback    *tmplMethod            // Additional special fallback function
+	Receive     *tmplMethod            // Additional special receive function
	Events      map[string]*tmplEvent  // Contract events accessors
	Libraries   map[string]string      // Same as tmplData, but filtered to only keep what the contract needs
-	Structs     map[string]*tmplStruct // Contract struct type definitions
-	Library     bool
+	Library     bool                   // Indicator whether the contract is a library
}

// tmplMethod is a wrapper around an abi.Method that contains a few preprocessed
@@ -48,7 +50,8 @@ type tmplMethod struct {
	Structured bool       // Whether the returns should be accumulated into a struct
}

-// tmplEvent is a wrapper around an a
+// tmplEvent is a wrapper around an abi.Event that contains a few preprocessed
+// and cached data fields.
type tmplEvent struct {
	Original   abi.Event // Original event as parsed by the abi package
	Normalized abi.Event // Normalized version of the parsed fields
@@ -62,7 +65,7 @@ type tmplField struct {
	SolKind abi.Type // Raw abi type information
}

-// tmplStruct is a wrapper around an abi.tuple contains a auto-generated
+// tmplStruct is a wrapper around an abi.tuple and contains an auto-generated
// struct name.
type tmplStruct struct {
	Name   string // Auto-generated struct name(before solidity v0.5.11) or raw name.
@@ -76,8 +79,8 @@ var tmplSource = map[Lang]string{
	LangJava: tmplSourceJava,
}

-// tmplSourceGo is the Go source template use to generate the contract binding
-// based on.
+// tmplSourceGo is the Go source template that the generated Go contract binding
+// is based on.
const tmplSourceGo = `
// Code generated - DO NOT EDIT.
// This file is a generated binding and any manual changes will be lost.
@@ -87,6 +90,7 @@ package {{.Package}}
import (
	"math/big"
	"strings"
+	"errors"

	ethereum "github.com/ethereum/go-ethereum"
	"github.com/ethereum/go-ethereum/accounts/abi"
@@ -98,44 +102,68 @@ import (

// Reference imports to suppress errors if they are not otherwise used.
var (
+	_ = errors.New
	_ = big.NewInt
	_ = strings.NewReader
	_ = ethereum.NotFound
-	_ = abi.U256
	_ = bind.Bind
	_ = common.Big1
	_ = types.BloomLookup
	_ = event.NewSubscription
)

-{{range $contract := .Contracts}}
-{{$structs := $contract.Structs}}
-// {{.Type}}ABI is the input ABI used to generate the binding from.
-const {{.Type}}ABI = "{{.InputABI}}"
+{{$structs := .Structs}}
+{{range $structs}}
+// {{.Name}} is an auto generated low-level Go binding around an user-defined struct.
+type {{.Name}} struct {
+{{range $field := .Fields}}
+	{{$field.Name}} {{$field.Type}}{{end}}
+}
+{{end}}
+
-{{if $contract.FuncSigs}}
-// {{.Type}}FuncSigs maps the 4-byte function signature to its string representation.
-var {{.Type}}FuncSigs = map[string]string{
+{{range $contract := .Contracts}}
+// {{.Type}}MetaData contains all meta data concerning the {{.Type}} contract.
+var {{.Type}}MetaData = &bind.MetaData{
+	ABI: "{{.InputABI}}",
+	{{if $contract.FuncSigs -}}
+	Sigs: map[string]string{
	{{range $strsig, $binsig := .FuncSigs}}"{{$binsig}}": "{{$strsig}}",
	{{end}}
-}
+	},
+	{{end -}}
+	{{if .InputBin -}}
+	Bin: "0x{{.InputBin}}",
+	{{end}}
+}
+// {{.Type}}ABI is the input ABI used to generate the binding from.
+// Deprecated: Use {{.Type}}MetaData.ABI instead.
+var {{.Type}}ABI = {{.Type}}MetaData.ABI
+
+{{if $contract.FuncSigs}}
+// Deprecated: Use {{.Type}}MetaData.Sigs instead.
+// {{.Type}}FuncSigs maps the 4-byte function signature to its string representation.
+var {{.Type}}FuncSigs = {{.Type}}MetaData.Sigs
{{end}}

{{if .InputBin}}
// {{.Type}}Bin is the compiled bytecode used for deploying new contracts.
-var {{.Type}}Bin = "0x{{.InputBin}}"
+// Deprecated: Use {{.Type}}MetaData.Bin instead.
+var {{.Type}}Bin = {{.Type}}MetaData.Bin

// Deploy{{.Type}} deploys a new Ethereum contract, binding an instance of {{.Type}} to it.
func Deploy{{.Type}}(auth *bind.TransactOpts, backend bind.ContractBackend {{range .Constructor.Inputs}}, {{.Name}} {{bindtype .Type $structs}}{{end}}) (common.Address, *types.Transaction, *{{.Type}}, error) {
-	parsed, err := abi.JSON(strings.NewReader({{.Type}}ABI))
+	parsed, err := {{.Type}}MetaData.GetAbi()
	if err != nil {
		return common.Address{}, nil, nil, err
	}
+	if parsed == nil {
+		return common.Address{}, nil, nil, errors.New("GetABI returned nil")
+	}
	{{range $pattern, $name := .Libraries}}
	{{decapitalise $name}}Addr, _, _, _ := Deploy{{capitalise $name}}(auth, backend)
	{{$contract.Type}}Bin = strings.Replace({{$contract.Type}}Bin, "__${{$pattern}}$__", {{decapitalise $name}}Addr.String()[2:], -1)
	{{end}}
-	address, tx, contract, err := bind.DeployContract(auth, parsed, common.FromHex({{.Type}}Bin), backend {{range .Constructor.Inputs}}, {{.Name}}{{end}})
+	address, tx, contract, err := bind.DeployContract(auth, *parsed, common.FromHex({{.Type}}Bin), backend {{range .Constructor.Inputs}}, {{.Name}}{{end}})
	if err != nil {
		return common.Address{}, nil, nil, err
	}
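The practical effect of this hunk for users of the generated bindings: a single MetaData value now carries the ABI, the 4-byte signature map and the bytecode, while the old FooABI/FooBin variables become thin deprecated aliases. A minimal sketch of consuming it, assuming an abigen-generated contract named Foo exists in the same package (the name is hypothetical):

package foo

import "errors"

// printable assumes abigen has generated a Foo binding here, so FooMetaData exists.
func fooMethodNames() ([]string, error) {
	parsed, err := FooMetaData.GetAbi() // parses the embedded ABI string
	if err != nil {
		return nil, err
	}
	if parsed == nil {
		return nil, errors.New("GetABI returned nil")
	}
	var names []string
	for name := range parsed.Methods {
		names = append(names, name)
	}
	return names, nil
}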
@@ -251,7 +279,7 @@ var (
// sets the output to result. The result type might be a single field for simple
// returns, a slice of interfaces for anonymous returns and a struct for named
// returns.
-func (_{{$contract.Type}} *{{$contract.Type}}Raw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {
+func (_{{$contract.Type}} *{{$contract.Type}}Raw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error {
	return _{{$contract.Type}}.Contract.{{$contract.Type}}Caller.contract.Call(opts, result, method, params...)
}

@@ -270,7 +298,7 @@ var (
// sets the output to result. The result type might be a single field for simple
// returns, a slice of interfaces for anonymous returns and a struct for named
// returns.
-func (_{{$contract.Type}} *{{$contract.Type}}CallerRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {
+func (_{{$contract.Type}} *{{$contract.Type}}CallerRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error {
	return _{{$contract.Type}}.Contract.contract.Call(opts, result, method, params...)
}
@@ -285,44 +313,43 @@ var (
	return _{{$contract.Type}}.Contract.contract.Transact(opts, method, params...)
}

-{{range .Structs}}
-// {{.Name}} is an auto generated low-level Go binding around an user-defined struct.
-type {{.Name}} struct {
-{{range $field := .Fields}}
-	{{$field.Name}} {{$field.Type}}{{end}}
-}
-{{end}}
-
{{range .Calls}}
// {{.Normalized.Name}} is a free data retrieval call binding the contract method 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatmethod .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Caller) {{.Normalized.Name}}(opts *bind.CallOpts {{range .Normalized.Inputs}}, {{.Name}} {{bindtype .Type $structs}} {{end}}) ({{if .Structured}}struct{ {{range .Normalized.Outputs}}{{.Name}} {{bindtype .Type $structs}};{{end}} },{{else}}{{range .Normalized.Outputs}}{{bindtype .Type $structs}},{{end}}{{end}} error) {
-	{{if .Structured}}ret := new(struct{
-		{{range .Normalized.Outputs}}{{.Name}} {{bindtype .Type $structs}}
-		{{end}}
-	}){{else}}var (
-		{{range $i, $_ := .Normalized.Outputs}}ret{{$i}} = new({{bindtype .Type $structs}})
-		{{end}}
-	){{end}}
-	out := {{if .Structured}}ret{{else}}{{if eq (len .Normalized.Outputs) 1}}ret0{{else}}&[]interface{}{
-		{{range $i, $_ := .Normalized.Outputs}}ret{{$i}},
-		{{end}}
-	}{{end}}{{end}}
-	err := _{{$contract.Type}}.contract.Call(opts, out, "{{.Original.Name}}" {{range .Normalized.Inputs}}, {{.Name}}{{end}})
-	return {{if .Structured}}*ret,{{else}}{{range $i, $_ := .Normalized.Outputs}}*ret{{$i}},{{end}}{{end}} err
+	var out []interface{}
+	err := _{{$contract.Type}}.contract.Call(opts, &out, "{{.Original.Name}}" {{range .Normalized.Inputs}}, {{.Name}}{{end}})
+	{{if .Structured}}
+	outstruct := new(struct{ {{range .Normalized.Outputs}} {{.Name}} {{bindtype .Type $structs}}; {{end}} })
+	if err != nil {
+		return *outstruct, err
+	}
+	{{range $i, $t := .Normalized.Outputs}}
+	outstruct.{{.Name}} = *abi.ConvertType(out[{{$i}}], new({{bindtype .Type $structs}})).(*{{bindtype .Type $structs}}){{end}}
+
+	return *outstruct, err
+	{{else}}
+	if err != nil {
+		return {{range $i, $_ := .Normalized.Outputs}}*new({{bindtype .Type $structs}}), {{end}} err
+	}
+	{{range $i, $t := .Normalized.Outputs}}
+	out{{$i}} := *abi.ConvertType(out[{{$i}}], new({{bindtype .Type $structs}})).(*{{bindtype .Type $structs}}){{end}}
+
+	return {{range $i, $t := .Normalized.Outputs}}out{{$i}}, {{end}} err
+	{{end}}
}

// {{.Normalized.Name}} is a free data retrieval call binding the contract method 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatmethod .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Session) {{.Normalized.Name}}({{range $i, $_ := .Normalized.Inputs}}{{if ne $i 0}},{{end}} {{.Name}} {{bindtype .Type $structs}} {{end}}) ({{if .Structured}}struct{ {{range .Normalized.Outputs}}{{.Name}} {{bindtype .Type $structs}};{{end}} }, {{else}} {{range .Normalized.Outputs}}{{bindtype .Type $structs}},{{end}} {{end}} error) {
	return _{{$contract.Type}}.Contract.{{.Normalized.Name}}(&_{{$contract.Type}}.CallOpts {{range .Normalized.Inputs}}, {{.Name}}{{end}})
}

// {{.Normalized.Name}} is a free data retrieval call binding the contract method 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatmethod .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}CallerSession) {{.Normalized.Name}}({{range $i, $_ := .Normalized.Inputs}}{{if ne $i 0}},{{end}} {{.Name}} {{bindtype .Type $structs}} {{end}}) ({{if .Structured}}struct{ {{range .Normalized.Outputs}}{{.Name}} {{bindtype .Type $structs}};{{end}} }, {{else}} {{range .Normalized.Outputs}}{{bindtype .Type $structs}},{{end}} {{end}} error) {
	return _{{$contract.Type}}.Contract.{{.Normalized.Name}}(&_{{$contract.Type}}.CallOpts {{range .Normalized.Inputs}}, {{.Name}}{{end}})
}
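The rewritten call path above unpacks into a generic []interface{} and then coerces each element with abi.ConvertType, exactly as the generated code does. A small, self-contained illustration of that conversion step, outside of any generated binding:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	// Pretend this slice came back from contract.Call(opts, &out, ...).
	out := []interface{}{big.NewInt(42)}

	// Generated bindings convert each raw output to its concrete Go type
	// using the same ConvertType pattern as in the template above.
	value := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)
	fmt.Println("decoded:", value)
}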
@@ -331,26 +358,72 @@ var (
{{range .Transacts}}
// {{.Normalized.Name}} is a paid mutator transaction binding the contract method 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatmethod .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Transactor) {{.Normalized.Name}}(opts *bind.TransactOpts {{range .Normalized.Inputs}}, {{.Name}} {{bindtype .Type $structs}} {{end}}) (*types.Transaction, error) {
	return _{{$contract.Type}}.contract.Transact(opts, "{{.Original.Name}}" {{range .Normalized.Inputs}}, {{.Name}}{{end}})
}

// {{.Normalized.Name}} is a paid mutator transaction binding the contract method 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatmethod .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Session) {{.Normalized.Name}}({{range $i, $_ := .Normalized.Inputs}}{{if ne $i 0}},{{end}} {{.Name}} {{bindtype .Type $structs}} {{end}}) (*types.Transaction, error) {
	return _{{$contract.Type}}.Contract.{{.Normalized.Name}}(&_{{$contract.Type}}.TransactOpts {{range $i, $_ := .Normalized.Inputs}}, {{.Name}}{{end}})
}

// {{.Normalized.Name}} is a paid mutator transaction binding the contract method 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatmethod .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}TransactorSession) {{.Normalized.Name}}({{range $i, $_ := .Normalized.Inputs}}{{if ne $i 0}},{{end}} {{.Name}} {{bindtype .Type $structs}} {{end}}) (*types.Transaction, error) {
	return _{{$contract.Type}}.Contract.{{.Normalized.Name}}(&_{{$contract.Type}}.TransactOpts {{range $i, $_ := .Normalized.Inputs}}, {{.Name}}{{end}})
}
{{end}}
+
+{{if .Fallback}}
+// Fallback is a paid mutator transaction binding the contract fallback function.
+//
+// Solidity: {{.Fallback.Original.String}}
+func (_{{$contract.Type}} *{{$contract.Type}}Transactor) Fallback(opts *bind.TransactOpts, calldata []byte) (*types.Transaction, error) {
+	return _{{$contract.Type}}.contract.RawTransact(opts, calldata)
+}
+
+// Fallback is a paid mutator transaction binding the contract fallback function.
+//
+// Solidity: {{.Fallback.Original.String}}
+func (_{{$contract.Type}} *{{$contract.Type}}Session) Fallback(calldata []byte) (*types.Transaction, error) {
+	return _{{$contract.Type}}.Contract.Fallback(&_{{$contract.Type}}.TransactOpts, calldata)
+}
+
+// Fallback is a paid mutator transaction binding the contract fallback function.
+//
+// Solidity: {{.Fallback.Original.String}}
+func (_{{$contract.Type}} *{{$contract.Type}}TransactorSession) Fallback(calldata []byte) (*types.Transaction, error) {
+	return _{{$contract.Type}}.Contract.Fallback(&_{{$contract.Type}}.TransactOpts, calldata)
+}
+{{end}}
+
+{{if .Receive}}
+// Receive is a paid mutator transaction binding the contract receive function.
+//
+// Solidity: {{.Receive.Original.String}}
+func (_{{$contract.Type}} *{{$contract.Type}}Transactor) Receive(opts *bind.TransactOpts) (*types.Transaction, error) {
+	return _{{$contract.Type}}.contract.RawTransact(opts, nil) // calldata is disallowed for receive function
+}
+
+// Receive is a paid mutator transaction binding the contract receive function.
+//
+// Solidity: {{.Receive.Original.String}}
+func (_{{$contract.Type}} *{{$contract.Type}}Session) Receive() (*types.Transaction, error) {
+	return _{{$contract.Type}}.Contract.Receive(&_{{$contract.Type}}.TransactOpts)
+}
+
+// Receive is a paid mutator transaction binding the contract receive function.
+//
+// Solidity: {{.Receive.Original.String}}
+func (_{{$contract.Type}} *{{$contract.Type}}TransactorSession) Receive() (*types.Transaction, error) {
+	return _{{$contract.Type}}.Contract.Receive(&_{{$contract.Type}}.TransactOpts)
+}
+{{end}}

{{range .Events}}
// {{$contract.Type}}{{.Normalized.Name}}Iterator is returned from Filter{{.Normalized.Name}} and is used to iterate over the raw logs and unpacked data for {{.Normalized.Name}} events raised by the {{$contract.Type}} contract.
type {{$contract.Type}}{{.Normalized.Name}}Iterator struct {
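From the caller's side, the new wrappers behave like ordinary transact methods. A hedged sketch, assuming an abigen-generated binding named Foo whose contract declares a payable fallback and a receive function; the instance and auth values are created elsewhere:

package foo

import "github.com/ethereum/go-ethereum/accounts/abi/bind"

// pokeFallback assumes this package also contains the generated Foo binding.
func pokeFallback(instance *Foo, auth *bind.TransactOpts) error {
	// Send arbitrary calldata to the fallback function.
	if _, err := instance.Fallback(auth, []byte{0x01, 0x02}); err != nil {
		return err
	}
	// Trigger the receive function; the wrapper always passes nil calldata.
	_, err := instance.Receive(auth)
	return err
}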
@@ -424,7 +497,7 @@ var (

// Filter{{.Normalized.Name}} is a free log retrieval operation binding the contract event 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatevent .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Filterer) Filter{{.Normalized.Name}}(opts *bind.FilterOpts{{range .Normalized.Inputs}}{{if .Indexed}}, {{.Name}} []{{bindtype .Type $structs}}{{end}}{{end}}) (*{{$contract.Type}}{{.Normalized.Name}}Iterator, error) {
	{{range .Normalized.Inputs}}
	{{if .Indexed}}var {{.Name}}Rule []interface{}
@@ -441,7 +514,7 @@ var (

// Watch{{.Normalized.Name}} is a free log subscription operation binding the contract event 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatevent .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Filterer) Watch{{.Normalized.Name}}(opts *bind.WatchOpts, sink chan<- *{{$contract.Type}}{{.Normalized.Name}}{{range .Normalized.Inputs}}{{if .Indexed}}, {{.Name}} []{{bindtype .Type $structs}}{{end}}{{end}}) (event.Subscription, error) {
	{{range .Normalized.Inputs}}
	{{if .Indexed}}var {{.Name}}Rule []interface{}
@@ -483,12 +556,13 @@ var (

// Parse{{.Normalized.Name}} is a log parse operation binding the contract event 0x{{printf "%x" .Original.ID}}.
//
-// Solidity: {{formatevent .Original $structs}}
+// Solidity: {{.Original.String}}
func (_{{$contract.Type}} *{{$contract.Type}}Filterer) Parse{{.Normalized.Name}}(log types.Log) (*{{$contract.Type}}{{.Normalized.Name}}, error) {
	event := new({{$contract.Type}}{{.Normalized.Name}})
	if err := _{{$contract.Type}}.contract.UnpackLog(event, "{{.Original.Name}}", log); err != nil {
		return nil, err
	}
+	event.Raw = log
	return event, nil
}
@@ -496,8 +570,8 @@ var (
{{end}}
`

-// tmplSourceJava is the Java source template use to generate the contract binding
-// based on.
+// tmplSourceJava is the Java source template that the generated Java contract binding
+// is based on.
const tmplSourceJava = `
// This file is an automatically generated Java binding. Do not modify as any
// change will likely be lost upon the next re-generation!
@@ -507,8 +581,8 @@ package {{.Package}};
import org.ethereum.geth.*;
import java.util.*;

+{{$structs := .Structs}}
{{range $contract := .Contracts}}
-{{$structs := $contract.Structs}}
{{if not .Library}}public {{end}}class {{.Type}} {
	// ABI is the input ABI used to generate the binding from.
	public final static String ABI = "{{.InputABI}}";
@@ -577,7 +651,7 @@ import java.util.*;
	// {{.Normalized.Name}} is a free data retrieval call binding the contract method 0x{{printf "%x" .Original.ID}}.
	//
	// Solidity: {{.Original.String}}
-	public {{if gt (len .Normalized.Outputs) 1}}{{capitalise .Normalized.Name}}Results{{else}}{{range .Normalized.Outputs}}{{bindtype .Type $structs}}{{end}}{{end}} {{.Normalized.Name}}(CallOpts opts{{range .Normalized.Inputs}}, {{bindtype .Type $structs}} {{.Name}}{{end}}) throws Exception {
+	public {{if gt (len .Normalized.Outputs) 1}}{{capitalise .Normalized.Name}}Results{{else if eq (len .Normalized.Outputs) 0}}void{{else}}{{range .Normalized.Outputs}}{{bindtype .Type $structs}}{{end}}{{end}} {{.Normalized.Name}}(CallOpts opts{{range .Normalized.Inputs}}, {{bindtype .Type $structs}} {{.Name}}{{end}}) throws Exception {
		Interfaces args = Geth.newInterfaces({{(len .Normalized.Inputs)}});
		{{range $index, $item := .Normalized.Inputs}}Interface arg{{$index}} = Geth.newInterface();arg{{$index}}.set{{namedtype (bindtype .Type $structs) .Type}}({{.Name}});args.set({{$index}},arg{{$index}});
		{{end}}
@@ -611,6 +685,24 @@ import java.util.*;
		return this.Contract.transact(opts, "{{.Original.Name}}" , args);
	}
	{{end}}
+
+	{{if .Fallback}}
+	// Fallback is a paid mutator transaction binding the contract fallback function.
+	//
+	// Solidity: {{.Fallback.Original.String}}
+	public Transaction Fallback(TransactOpts opts, byte[] calldata) throws Exception {
+		return this.Contract.rawTransact(opts, calldata);
+	}
+	{{end}}
+
+	{{if .Receive}}
+	// Receive is a paid mutator transaction binding the contract receive function.
+	//
+	// Solidity: {{.Receive.Original.String}}
+	public Transaction Receive(TransactOpts opts) throws Exception {
+		return this.Contract.rawTransact(opts, null);
+	}
+	{{end}}
}
{{end}}
`
@@ -1,244 +0,0 @@
-// Copyright 2018 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package bind
-
-import (
-	"encoding/binary"
-	"errors"
-	"fmt"
-	"math/big"
-	"reflect"
-
-	"github.com/ethereum/go-ethereum/accounts/abi"
-	"github.com/ethereum/go-ethereum/common"
-	"github.com/ethereum/go-ethereum/crypto"
-)
-
-// makeTopics converts a filter query argument list into a filter topic set.
-func makeTopics(query ...[]interface{}) ([][]common.Hash, error) {
-	topics := make([][]common.Hash, len(query))
-	for i, filter := range query {
-		for _, rule := range filter {
-			var topic common.Hash
-
-			// Try to generate the topic based on simple types
-			switch rule := rule.(type) {
-			case common.Hash:
-				copy(topic[:], rule[:])
-			case common.Address:
-				copy(topic[common.HashLength-common.AddressLength:], rule[:])
-			case *big.Int:
-				blob := rule.Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case bool:
-				if rule {
-					topic[common.HashLength-1] = 1
-				}
-			case int8:
-				blob := big.NewInt(int64(rule)).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case int16:
-				blob := big.NewInt(int64(rule)).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case int32:
-				blob := big.NewInt(int64(rule)).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case int64:
-				blob := big.NewInt(rule).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case uint8:
-				blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case uint16:
-				blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case uint32:
-				blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case uint64:
-				blob := new(big.Int).SetUint64(rule).Bytes()
-				copy(topic[common.HashLength-len(blob):], blob)
-			case string:
-				hash := crypto.Keccak256Hash([]byte(rule))
-				copy(topic[:], hash[:])
-			case []byte:
-				hash := crypto.Keccak256Hash(rule)
-				copy(topic[:], hash[:])
-
-			default:
-				// todo(rjl493456442) according solidity documentation, indexed event
-				// parameters that are not value types i.e. arrays and structs are not
-				// stored directly but instead a keccak256-hash of an encoding is stored.
-				//
-				// We only convert stringS and bytes to hash, still need to deal with
-				// array(both fixed-size and dynamic-size) and struct.
-
-				// Attempt to generate the topic from funky types
-				val := reflect.ValueOf(rule)
-				switch {
-				// static byte array
-				case val.Kind() == reflect.Array && reflect.TypeOf(rule).Elem().Kind() == reflect.Uint8:
-					reflect.Copy(reflect.ValueOf(topic[:val.Len()]), val)
-				default:
-					return nil, fmt.Errorf("unsupported indexed type: %T", rule)
-				}
-			}
-			topics[i] = append(topics[i], topic)
-		}
-	}
-	return topics, nil
-}
-
-// Big batch of reflect types for topic reconstruction.
-var (
-	reflectHash    = reflect.TypeOf(common.Hash{})
-	reflectAddress = reflect.TypeOf(common.Address{})
-	reflectBigInt  = reflect.TypeOf(new(big.Int))
-)
-
-// parseTopics converts the indexed topic fields into actual log field values.
-//
-// Note, dynamic types cannot be reconstructed since they get mapped to Keccak256
-// hashes as the topic value!
-func parseTopics(out interface{}, fields abi.Arguments, topics []common.Hash) error {
-	// Sanity check that the fields and topics match up
-	if len(fields) != len(topics) {
-		return errors.New("topic/field count mismatch")
-	}
-	// Iterate over all the fields and reconstruct them from topics
-	for _, arg := range fields {
-		if !arg.Indexed {
-			return errors.New("non-indexed field in topic reconstruction")
-		}
-		field := reflect.ValueOf(out).Elem().FieldByName(capitalise(arg.Name))
-
-		// Try to parse the topic back into the fields based on primitive types
-		switch field.Kind() {
-		case reflect.Bool:
-			if topics[0][common.HashLength-1] == 1 {
-				field.Set(reflect.ValueOf(true))
-			}
-		case reflect.Int8:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(int8(num.Int64())))
-
-		case reflect.Int16:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(int16(num.Int64())))
-
-		case reflect.Int32:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(int32(num.Int64())))
-
-		case reflect.Int64:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(num.Int64()))
-
-		case reflect.Uint8:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(uint8(num.Uint64())))
-
-		case reflect.Uint16:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(uint16(num.Uint64())))
-
-		case reflect.Uint32:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(uint32(num.Uint64())))
-
-		case reflect.Uint64:
-			num := new(big.Int).SetBytes(topics[0][:])
-			field.Set(reflect.ValueOf(num.Uint64()))
-
-		default:
-			// Ran out of plain primitive types, try custom types
-
-			switch field.Type() {
-			case reflectHash: // Also covers all dynamic types
-				field.Set(reflect.ValueOf(topics[0]))
-
-			case reflectAddress:
-				var addr common.Address
-				copy(addr[:], topics[0][common.HashLength-common.AddressLength:])
-				field.Set(reflect.ValueOf(addr))
-
-			case reflectBigInt:
-				num := new(big.Int).SetBytes(topics[0][:])
-				field.Set(reflect.ValueOf(num))
-
-			default:
-				// Ran out of custom types, try the crazies
-				switch {
-				// static byte array
-				case arg.Type.T == abi.FixedBytesTy:
-					reflect.Copy(field, reflect.ValueOf(topics[0][:arg.Type.Size]))
-				default:
-					return fmt.Errorf("unsupported indexed type: %v", arg.Type)
-				}
-			}
-		}
-		topics = topics[1:]
-	}
-	return nil
-}
-
-// parseTopicsIntoMap converts the indexed topic field-value pairs into map key-value pairs
-func parseTopicsIntoMap(out map[string]interface{}, fields abi.Arguments, topics []common.Hash) error {
-	// Sanity check that the fields and topics match up
-	if len(fields) != len(topics) {
-		return errors.New("topic/field count mismatch")
-	}
-	// Iterate over all the fields and reconstruct them from topics
-	for _, arg := range fields {
-		if !arg.Indexed {
-			return errors.New("non-indexed field in topic reconstruction")
-		}
-
-		switch arg.Type.T {
-		case abi.BoolTy:
-			out[arg.Name] = topics[0][common.HashLength-1] == 1
-		case abi.IntTy, abi.UintTy:
-			num := new(big.Int).SetBytes(topics[0][:])
-			out[arg.Name] = num
-		case abi.AddressTy:
-			var addr common.Address
-			copy(addr[:], topics[0][common.HashLength-common.AddressLength:])
-			out[arg.Name] = addr
-		case abi.HashTy:
-			out[arg.Name] = topics[0]
-		case abi.FixedBytesTy:
-			out[arg.Name] = topics[0][:]
-		case abi.StringTy, abi.BytesTy, abi.SliceTy, abi.ArrayTy:
-			// Array types (including strings and bytes) have their keccak256 hashes stored in the topic- not a hash
-			// whose bytes can be decoded to the actual value- so the best we can do is retrieve that hash
-			out[arg.Name] = topics[0]
-		case abi.FunctionTy:
-			if garbage := binary.BigEndian.Uint64(topics[0][0:8]); garbage != 0 {
-				return fmt.Errorf("bind: got improperly encoded function type, got %v", topics[0].Bytes())
-			}
-			var tmp [24]byte
-			copy(tmp[:], topics[0][8:32])
-			out[arg.Name] = tmp
-		default: // Not handling tuples
-			return fmt.Errorf("unsupported indexed type: %v", arg.Type)
-		}
-
-		topics = topics[1:]
-	}
-
-	return nil
-}
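The removed makeTopics logic is easy to demonstrate independently. Below is a hedged sketch of the same idea using only the public common and crypto packages (not the geth code itself): a string filter value becomes the Keccak256 hash of its bytes, while an address is left-padded into the 32-byte topic.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
)

// topicFor mirrors two arms of the removed makeTopics switch statement.
func topicFor(rule interface{}) (common.Hash, error) {
	var topic common.Hash
	switch rule := rule.(type) {
	case common.Address:
		copy(topic[common.HashLength-common.AddressLength:], rule[:])
	case string:
		hash := crypto.Keccak256Hash([]byte(rule))
		copy(topic[:], hash[:])
	default:
		return common.Hash{}, fmt.Errorf("unsupported indexed type: %T", rule)
	}
	return topic, nil
}

func main() {
	addrTopic, _ := topicFor(common.HexToAddress("0x0000000000000000000000000000000000000001"))
	strTopic, _ := topicFor("hello")
	fmt.Println(addrTopic.Hex())
	fmt.Println(strTopic.Hex())
}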
@@ -1,103 +0,0 @@
-// Copyright 2019 The go-ethereum Authors
-// This file is part of the go-ethereum library.
-//
-// The go-ethereum library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// The go-ethereum library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public License
-// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
-
-package bind
-
-import (
-	"reflect"
-	"testing"
-
-	"github.com/ethereum/go-ethereum/accounts/abi"
-	"github.com/ethereum/go-ethereum/common"
-)
-
-func TestMakeTopics(t *testing.T) {
-	type args struct {
-		query [][]interface{}
-	}
-	tests := []struct {
-		name    string
-		args    args
-		want    [][]common.Hash
-		wantErr bool
-	}{
-		{
-			"support fixed byte types, right padded to 32 bytes",
-			args{[][]interface{}{{[5]byte{1, 2, 3, 4, 5}}}},
-			[][]common.Hash{{common.Hash{1, 2, 3, 4, 5}}},
-			false,
-		},
-	}
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			got, err := makeTopics(tt.args.query...)
-			if (err != nil) != tt.wantErr {
-				t.Errorf("makeTopics() error = %v, wantErr %v", err, tt.wantErr)
-				return
-			}
-			if !reflect.DeepEqual(got, tt.want) {
-				t.Errorf("makeTopics() = %v, want %v", got, tt.want)
-			}
-		})
-	}
-}
-
-func TestParseTopics(t *testing.T) {
-	type bytesStruct struct {
-		StaticBytes [5]byte
-	}
-	bytesType, _ := abi.NewType("bytes5", "", nil)
-	type args struct {
-		createObj func() interface{}
-		resultObj func() interface{}
-		fields    abi.Arguments
-		topics    []common.Hash
-	}
-	tests := []struct {
-		name    string
-		args    args
-		wantErr bool
-	}{
-		{
-			name: "support fixed byte types, right padded to 32 bytes",
-			args: args{
-				createObj: func() interface{} { return &bytesStruct{} },
-				resultObj: func() interface{} { return &bytesStruct{StaticBytes: [5]byte{1, 2, 3, 4, 5}} },
-				fields: abi.Arguments{abi.Argument{
-					Name:    "staticBytes",
-					Type:    bytesType,
-					Indexed: true,
-				}},
-				topics: []common.Hash{
-					{1, 2, 3, 4, 5},
-				},
-			},
-			wantErr: false,
-		},
-	}
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			createObj := tt.args.createObj()
-			if err := parseTopics(createObj, tt.args.fields, tt.args.topics); (err != nil) != tt.wantErr {
-				t.Errorf("parseTopics() error = %v, wantErr %v", err, tt.wantErr)
-			}
-			resultObj := tt.args.resultObj()
-			if !reflect.DeepEqual(createObj, resultObj) {
-				t.Errorf("parseTopics() = %v, want %v", createObj, resultObj)
-			}
-		})
-	}
-}
@@ -18,7 +18,7 @@ package bind

 import (
 	"context"
-	"fmt"
+	"errors"
 	"time"

 	"github.com/ethereum/go-ethereum/common"
@@ -56,14 +56,14 @@ func WaitMined(ctx context.Context, b DeployBackend, tx *types.Transaction) (*ty
 // contract address when it is mined. It stops waiting when ctx is canceled.
 func WaitDeployed(ctx context.Context, b DeployBackend, tx *types.Transaction) (common.Address, error) {
 	if tx.To() != nil {
-		return common.Address{}, fmt.Errorf("tx is not contract creation")
+		return common.Address{}, errors.New("tx is not contract creation")
 	}
 	receipt, err := WaitMined(ctx, b, tx)
 	if err != nil {
 		return common.Address{}, err
 	}
 	if receipt.ContractAddress == (common.Address{}) {
-		return common.Address{}, fmt.Errorf("zero address")
+		return common.Address{}, errors.New("zero address")
 	}
 	// Check that code has indeed been deployed at the address.
 	// This matters on pre-Homestead chains: OOG in the constructor
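The change above only swaps the error constructors; the call pattern for WaitDeployed is unchanged. A minimal caller sketch under the assumption that a DeployBackend and a signed contract-creation transaction already exist (the helper name waitForContract is hypothetical):

package main

import (
	"context"
	"time"

	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
)

// waitForContract blocks until the creation transaction is mined and code
// exists at the resulting address, or until the timeout expires.
func waitForContract(backend bind.DeployBackend, tx *types.Transaction) (common.Address, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()
	return bind.WaitDeployed(ctx, backend, tx)
}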
@@ -18,6 +18,7 @@ package bind_test

 import (
 	"context"
+	"errors"
 	"math/big"
 	"testing"
 	"time"
@@ -55,14 +56,17 @@ func TestWaitDeployed(t *testing.T) {
 	for name, test := range waitDeployedTests {
 		backend := backends.NewSimulatedBackend(
 			core.GenesisAlloc{
-				crypto.PubkeyToAddress(testKey.PublicKey): {Balance: big.NewInt(10000000000)},
+				crypto.PubkeyToAddress(testKey.PublicKey): {Balance: big.NewInt(10000000000000000)},
 			},
 			10000000,
 		)
 		defer backend.Close()

-		// Create the transaction.
-		tx := types.NewContractCreation(0, big.NewInt(0), test.gas, big.NewInt(1), common.FromHex(test.code))
+		// Create the transaction
+		head, _ := backend.HeaderByNumber(context.Background(), nil) // Should be child's, good enough
+		gasPrice := new(big.Int).Add(head.BaseFee, big.NewInt(1))
+
+		tx := types.NewContractCreation(0, big.NewInt(0), test.gas, gasPrice, common.FromHex(test.code))
 		tx, _ = types.SignTx(tx, types.HomesteadSigner{}, testKey)

 		// Wait for it to get mined in the background.
@@ -84,7 +88,7 @@ func TestWaitDeployed(t *testing.T) {
 		select {
 		case <-mined:
 			if err != test.wantErr {
-				t.Errorf("test %q: error mismatch: got %q, want %q", name, err, test.wantErr)
+				t.Errorf("test %q: error mismatch: want %q, got %q", name, test.wantErr, err)
 			}
 			if address != test.wantAddress {
 				t.Errorf("test %q: unexpected contract address %s", name, address.Hex())
@@ -94,3 +98,43 @@ func TestWaitDeployed(t *testing.T) {
 		}
 	}
 }
+
+func TestWaitDeployedCornerCases(t *testing.T) {
+	backend := backends.NewSimulatedBackend(
+		core.GenesisAlloc{
+			crypto.PubkeyToAddress(testKey.PublicKey): {Balance: big.NewInt(10000000000000000)},
+		},
+		10000000,
+	)
+	defer backend.Close()
+
+	head, _ := backend.HeaderByNumber(context.Background(), nil) // Should be child's, good enough
+	gasPrice := new(big.Int).Add(head.BaseFee, big.NewInt(1))
+
+	// Create a transaction to an account.
+	code := "6060604052600a8060106000396000f360606040526008565b00"
+	tx := types.NewTransaction(0, common.HexToAddress("0x01"), big.NewInt(0), 3000000, gasPrice, common.FromHex(code))
+	tx, _ = types.SignTx(tx, types.HomesteadSigner{}, testKey)
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
+	backend.SendTransaction(ctx, tx)
+	backend.Commit()
+	notContentCreation := errors.New("tx is not contract creation")
+	if _, err := bind.WaitDeployed(ctx, backend, tx); err.Error() != notContentCreation.Error() {
+		t.Errorf("error missmatch: want %q, got %q, ", notContentCreation, err)
+	}
+
+	// Create a transaction that is not mined.
+	tx = types.NewContractCreation(1, big.NewInt(0), 3000000, gasPrice, common.FromHex(code))
+	tx, _ = types.SignTx(tx, types.HomesteadSigner{}, testKey)
+
+	go func() {
+		contextCanceled := errors.New("context canceled")
+		if _, err := bind.WaitDeployed(ctx, backend, tx); err.Error() != contextCanceled.Error() {
+			t.Errorf("error missmatch: want %q, got %q, ", contextCanceled, err)
+		}
+	}()
+
+	backend.SendTransaction(ctx, tx)
+	cancel()
+}
@@ -1,4 +1,4 @@
-// Copyright 2016 The go-ethereum Authors
+// Copyright 2021 The go-ethereum Authors
 // This file is part of the go-ethereum library.
 //
 // The go-ethereum library is free software: you can redistribute it and/or modify
@ -17,68 +17,75 @@
|
|||||||
package abi
|
package abi
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"reflect"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/ethereum/go-ethereum/common"
|
||||||
|
"github.com/ethereum/go-ethereum/crypto"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
type Error struct {
|
||||||
errBadBool = errors.New("abi: improperly encoded boolean value")
|
Name string
|
||||||
)
|
Inputs Arguments
|
||||||
|
str string
|
||||||
// formatSliceString formats the reflection kind with the given slice size
|
// Sig contains the string signature according to the ABI spec.
|
||||||
// and returns a formatted string representation.
|
// e.g. event foo(uint32 a, int b) = "foo(uint32,int256)"
|
||||||
func formatSliceString(kind reflect.Kind, sliceSize int) string {
|
// Please note that "int" is substitute for its canonical representation "int256"
|
||||||
if sliceSize == -1 {
|
Sig string
|
||||||
return fmt.Sprintf("[]%v", kind)
|
// ID returns the canonical representation of the event's signature used by the
|
||||||
}
|
// abi definition to identify event names and types.
|
||||||
return fmt.Sprintf("[%d]%v", sliceSize, kind)
|
ID common.Hash
|
||||||
}
|
}
|
||||||
|
|
||||||
// sliceTypeCheck checks that the given slice can by assigned to the reflection
|
func NewError(name string, inputs Arguments) Error {
|
||||||
// type in t.
|
// sanitize inputs to remove inputs without names
|
||||||
func sliceTypeCheck(t Type, val reflect.Value) error {
|
// and precompute string and sig representation.
|
||||||
if val.Kind() != reflect.Slice && val.Kind() != reflect.Array {
|
names := make([]string, len(inputs))
|
||||||
return typeErr(formatSliceString(t.Kind, t.Size), val.Type())
|
types := make([]string, len(inputs))
|
||||||
}
|
for i, input := range inputs {
|
||||||
|
if input.Name == "" {
|
||||||
if t.T == ArrayTy && val.Len() != t.Size {
|
inputs[i] = Argument{
|
||||||
return typeErr(formatSliceString(t.Elem.Kind, t.Size), formatSliceString(val.Type().Elem().Kind(), val.Len()))
|
Name: fmt.Sprintf("arg%d", i),
|
||||||
}
|
Indexed: input.Indexed,
|
||||||
|
Type: input.Type,
|
||||||
if t.Elem.T == SliceTy {
|
}
|
||||||
if val.Len() > 0 {
|
} else {
|
||||||
return sliceTypeCheck(*t.Elem, val.Index(0))
|
inputs[i] = input
|
||||||
}
|
}
|
||||||
} else if t.Elem.T == ArrayTy {
|
// string representation
|
||||||
return sliceTypeCheck(*t.Elem, val.Index(0))
|
names[i] = fmt.Sprintf("%v %v", input.Type, inputs[i].Name)
|
||||||
|
if input.Indexed {
|
||||||
|
names[i] = fmt.Sprintf("%v indexed %v", input.Type, inputs[i].Name)
|
||||||
|
}
|
||||||
|
// sig representation
|
||||||
|
types[i] = input.Type.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
if elemKind := val.Type().Elem().Kind(); elemKind != t.Elem.Kind {
|
str := fmt.Sprintf("error %v(%v)", name, strings.Join(names, ", "))
|
||||||
return typeErr(formatSliceString(t.Elem.Kind, t.Size), val.Type())
|
sig := fmt.Sprintf("%v(%v)", name, strings.Join(types, ","))
|
||||||
|
id := common.BytesToHash(crypto.Keccak256([]byte(sig)))
|
||||||
|
|
||||||
|
return Error{
|
||||||
|
Name: name,
|
||||||
|
Inputs: inputs,
|
||||||
|
str: str,
|
||||||
|
Sig: sig,
|
||||||
|
ID: id,
|
||||||
}
|
}
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// typeCheck checks that the given reflection value can be assigned to the reflection
|
func (e *Error) String() string {
|
||||||
// type in t.
|
return e.str
|
||||||
func typeCheck(t Type, value reflect.Value) error {
|
|
||||||
if t.T == SliceTy || t.T == ArrayTy {
|
|
||||||
return sliceTypeCheck(t, value)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check base type validity. Element types will be checked later on.
|
|
||||||
if t.Kind != value.Kind() {
|
|
||||||
return typeErr(t.Kind, value.Kind())
|
|
||||||
} else if t.T == FixedBytesTy && t.Size != value.Len() {
|
|
||||||
return typeErr(t.Type, value.Type())
|
|
||||||
} else {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// typeErr returns a formatted type casting error.
|
func (e *Error) Unpack(data []byte) (interface{}, error) {
|
||||||
func typeErr(expected, got interface{}) error {
|
if len(data) < 4 {
|
||||||
return fmt.Errorf("abi: cannot use %v as type %v as argument", got, expected)
|
return "", errors.New("invalid data for unpacking")
|
||||||
|
}
|
||||||
|
if !bytes.Equal(data[:4], e.ID[:4]) {
|
||||||
|
return "", errors.New("invalid data for unpacking")
|
||||||
|
}
|
||||||
|
return e.Inputs.Unpack(data[4:])
|
||||||
}
|
}
|
||||||
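The rewritten error.go above turns Sig and ID into precomputed fields on the new Error type. A small usage sketch, assuming that constructor; the InsufficientBalance error is only an example:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	uint256T, _ := abi.NewType("uint256", "", nil)

	// NewError precomputes the string form, the signature and the keccak256 ID.
	e := abi.NewError("InsufficientBalance", abi.Arguments{
		{Name: "available", Type: uint256T},
		{Name: "required", Type: uint256T},
	})

	fmt.Println(e.Sig)           // InsufficientBalance(uint256,uint256)
	fmt.Println(e.ID.Hex()[:10]) // "0x" plus the 4-byte revert selector
}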
accounts/abi/error_handling.go (new file, 82 lines)
@@ -0,0 +1,82 @@
// Copyright 2016 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.

package abi

import (
	"errors"
	"fmt"
	"reflect"
)

var (
	errBadBool = errors.New("abi: improperly encoded boolean value")
)

// formatSliceString formats the reflection kind with the given slice size
// and returns a formatted string representation.
func formatSliceString(kind reflect.Kind, sliceSize int) string {
	if sliceSize == -1 {
		return fmt.Sprintf("[]%v", kind)
	}
	return fmt.Sprintf("[%d]%v", sliceSize, kind)
}

// sliceTypeCheck checks that the given slice can by assigned to the reflection
// type in t.
func sliceTypeCheck(t Type, val reflect.Value) error {
	if val.Kind() != reflect.Slice && val.Kind() != reflect.Array {
		return typeErr(formatSliceString(t.GetType().Kind(), t.Size), val.Type())
	}

	if t.T == ArrayTy && val.Len() != t.Size {
		return typeErr(formatSliceString(t.Elem.GetType().Kind(), t.Size), formatSliceString(val.Type().Elem().Kind(), val.Len()))
	}

	if t.Elem.T == SliceTy || t.Elem.T == ArrayTy {
		if val.Len() > 0 {
			return sliceTypeCheck(*t.Elem, val.Index(0))
		}
	}

	if val.Type().Elem().Kind() != t.Elem.GetType().Kind() {
		return typeErr(formatSliceString(t.Elem.GetType().Kind(), t.Size), val.Type())
	}
	return nil
}

// typeCheck checks that the given reflection value can be assigned to the reflection
// type in t.
func typeCheck(t Type, value reflect.Value) error {
	if t.T == SliceTy || t.T == ArrayTy {
		return sliceTypeCheck(t, value)
	}

	// Check base type validity. Element types will be checked later on.
	if t.GetType().Kind() != value.Kind() {
		return typeErr(t.GetType().Kind(), value.Kind())
	} else if t.T == FixedBytesTy && t.Size != value.Len() {
		return typeErr(t.GetType(), value.Type())
	} else {
		return nil
	}

}

// typeErr returns a formatted type casting error.
func typeErr(expected, got interface{}) error {
	return fmt.Errorf("abi: cannot use %v as type %v as argument", got, expected)
}
@@ -32,7 +32,7 @@ type Event struct
 // the raw name and a suffix will be added in the case of a event overload.
 //
 // e.g.
-// There are two events have same name:
+// These are two events that have the same name:
 // * foo(int,int)
 // * foo(uint,uint)
 // The event name of the first one wll be resolved as foo while the second one
@ -42,36 +42,59 @@ type Event struct {
|
|||||||
RawName string
|
RawName string
|
||||||
Anonymous bool
|
Anonymous bool
|
||||||
Inputs Arguments
|
Inputs Arguments
|
||||||
|
str string
|
||||||
|
// Sig contains the string signature according to the ABI spec.
|
||||||
|
// e.g. event foo(uint32 a, int b) = "foo(uint32,int256)"
|
||||||
|
// Please note that "int" is substitute for its canonical representation "int256"
|
||||||
|
Sig string
|
||||||
|
// ID returns the canonical representation of the event's signature used by the
|
||||||
|
// abi definition to identify event names and types.
|
||||||
|
ID common.Hash
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEvent creates a new Event.
|
||||||
|
// It sanitizes the input arguments to remove unnamed arguments.
|
||||||
|
// It also precomputes the id, signature and string representation
|
||||||
|
// of the event.
|
||||||
|
func NewEvent(name, rawName string, anonymous bool, inputs Arguments) Event {
|
||||||
|
// sanitize inputs to remove inputs without names
|
||||||
|
// and precompute string and sig representation.
|
||||||
|
names := make([]string, len(inputs))
|
||||||
|
types := make([]string, len(inputs))
|
||||||
|
for i, input := range inputs {
|
||||||
|
if input.Name == "" {
|
||||||
|
inputs[i] = Argument{
|
||||||
|
Name: fmt.Sprintf("arg%d", i),
|
||||||
|
Indexed: input.Indexed,
|
||||||
|
Type: input.Type,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
inputs[i] = input
|
||||||
|
}
|
||||||
|
// string representation
|
||||||
|
names[i] = fmt.Sprintf("%v %v", input.Type, inputs[i].Name)
|
||||||
|
if input.Indexed {
|
||||||
|
names[i] = fmt.Sprintf("%v indexed %v", input.Type, inputs[i].Name)
|
||||||
|
}
|
||||||
|
// sig representation
|
||||||
|
types[i] = input.Type.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
str := fmt.Sprintf("event %v(%v)", rawName, strings.Join(names, ", "))
|
||||||
|
sig := fmt.Sprintf("%v(%v)", rawName, strings.Join(types, ","))
|
||||||
|
id := common.BytesToHash(crypto.Keccak256([]byte(sig)))
|
||||||
|
|
||||||
|
return Event{
|
||||||
|
Name: name,
|
||||||
|
RawName: rawName,
|
||||||
|
Anonymous: anonymous,
|
||||||
|
Inputs: inputs,
|
||||||
|
str: str,
|
||||||
|
Sig: sig,
|
||||||
|
ID: id,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (e Event) String() string {
|
func (e Event) String() string {
|
||||||
inputs := make([]string, len(e.Inputs))
|
return e.str
|
||||||
for i, input := range e.Inputs {
|
|
||||||
inputs[i] = fmt.Sprintf("%v %v", input.Type, input.Name)
|
|
||||||
if input.Indexed {
|
|
||||||
inputs[i] = fmt.Sprintf("%v indexed %v", input.Type, input.Name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("event %v(%v)", e.RawName, strings.Join(inputs, ", "))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sig returns the event string signature according to the ABI spec.
|
|
||||||
//
|
|
||||||
// Example
|
|
||||||
//
|
|
||||||
// event foo(uint32 a, int b) = "foo(uint32,int256)"
|
|
||||||
//
|
|
||||||
// Please note that "int" is substitute for its canonical representation "int256"
|
|
||||||
func (e Event) Sig() string {
|
|
||||||
types := make([]string, len(e.Inputs))
|
|
||||||
for i, input := range e.Inputs {
|
|
||||||
types[i] = input.Type.String()
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%v(%v)", e.RawName, strings.Join(types, ","))
|
|
||||||
}
|
|
||||||
|
|
||||||
// ID returns the canonical representation of the event's signature used by the
|
|
||||||
// abi definition to identify event names and types.
|
|
||||||
func (e Event) ID() common.Hash {
|
|
||||||
return common.BytesToHash(crypto.Keccak256([]byte(e.Sig())))
|
|
||||||
}
|
}
|
||||||
|
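A short sketch of the precomputed fields that NewEvent (shown above) fills in; the Transfer layout used here is only an illustration:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	addressT, _ := abi.NewType("address", "", nil)
	uint256T, _ := abi.NewType("uint256", "", nil)

	// NewEvent precomputes Sig, ID and the human-readable string representation.
	transfer := abi.NewEvent("Transfer", "Transfer", false, abi.Arguments{
		{Name: "from", Type: addressT, Indexed: true},
		{Name: "to", Type: addressT, Indexed: true},
		{Name: "value", Type: uint256T},
	})

	fmt.Println(transfer.Sig)      // Transfer(address,address,uint256)
	fmt.Println(transfer.ID.Hex()) // keccak256 of Sig, i.e. what appears as the log's topics[0]
	fmt.Println(transfer)          // event Transfer(address indexed from, address indexed to, uint256 value)
}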
@@ -104,8 +104,8 @@ func TestEventId(t *testing.T) {
 	}

 	for name, event := range abi.Events {
-		if event.ID() != test.expectations[name] {
-			t.Errorf("expected id to be %x, got %x", test.expectations[name], event.ID())
+		if event.ID != test.expectations[name] {
+			t.Errorf("expected id to be %x, got %x", test.expectations[name], event.ID)
 		}
 	}
 }
@ -147,10 +147,6 @@ func TestEventString(t *testing.T) {
|
|||||||
// TestEventMultiValueWithArrayUnpack verifies that array fields will be counted after parsing array.
|
// TestEventMultiValueWithArrayUnpack verifies that array fields will be counted after parsing array.
|
||||||
func TestEventMultiValueWithArrayUnpack(t *testing.T) {
|
func TestEventMultiValueWithArrayUnpack(t *testing.T) {
|
||||||
definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": false, "name":"value1", "type":"uint8[2]"},{"indexed": false, "name":"value2", "type":"uint8"}]}]`
|
definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": false, "name":"value1", "type":"uint8[2]"},{"indexed": false, "name":"value2", "type":"uint8"}]}]`
|
||||||
type testStruct struct {
|
|
||||||
Value1 [2]uint8
|
|
||||||
Value2 uint8
|
|
||||||
}
|
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
var b bytes.Buffer
|
var b bytes.Buffer
|
||||||
@ -158,10 +154,10 @@ func TestEventMultiValueWithArrayUnpack(t *testing.T) {
|
|||||||
for ; i <= 3; i++ {
|
for ; i <= 3; i++ {
|
||||||
b.Write(packNum(reflect.ValueOf(i)))
|
b.Write(packNum(reflect.ValueOf(i)))
|
||||||
}
|
}
|
||||||
var rst testStruct
|
unpacked, err := abi.Unpack("test", b.Bytes())
|
||||||
require.NoError(t, abi.Unpack(&rst, "test", b.Bytes()))
|
require.NoError(t, err)
|
||||||
require.Equal(t, [2]uint8{1, 2}, rst.Value1)
|
require.Equal(t, [2]uint8{1, 2}, unpacked[0])
|
||||||
require.Equal(t, uint8(3), rst.Value2)
|
require.Equal(t, uint8(3), unpacked[1])
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestEventTupleUnpack(t *testing.T) {
|
func TestEventTupleUnpack(t *testing.T) {
|
||||||
@ -173,7 +169,7 @@ func TestEventTupleUnpack(t *testing.T) {
|
|||||||
type EventTransferWithTag struct {
|
type EventTransferWithTag struct {
|
||||||
// this is valid because `value` is not exportable,
|
// this is valid because `value` is not exportable,
|
||||||
// so value is only unmarshalled into `Value1`.
|
// so value is only unmarshalled into `Value1`.
|
||||||
value *big.Int
|
value *big.Int //lint:ignore U1000 unused field is part of test
|
||||||
Value1 *big.Int `abi:"value"`
|
Value1 *big.Int `abi:"value"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -312,14 +308,14 @@ func TestEventTupleUnpack(t *testing.T) {
|
|||||||
&[]interface{}{common.Address{}, new(big.Int)},
|
&[]interface{}{common.Address{}, new(big.Int)},
|
||||||
&[]interface{}{},
|
&[]interface{}{},
|
||||||
jsonEventPledge,
|
jsonEventPledge,
|
||||||
"abi: insufficient number of elements in the list/array for unpack, want 3, got 2",
|
"abi: insufficient number of arguments for unpack, want 3, got 2",
|
||||||
"Can not unpack Pledge event into too short slice",
|
"Can not unpack Pledge event into too short slice",
|
||||||
}, {
|
}, {
|
||||||
pledgeData1,
|
pledgeData1,
|
||||||
new(map[string]interface{}),
|
new(map[string]interface{}),
|
||||||
&[]interface{}{},
|
&[]interface{}{},
|
||||||
jsonEventPledge,
|
jsonEventPledge,
|
||||||
"abi: cannot unmarshal tuple into map[string]interface {}",
|
"abi:[2] cannot unmarshal tuple in to map[string]interface {}",
|
||||||
"Can not unpack Pledge event into map",
|
"Can not unpack Pledge event into map",
|
||||||
}, {
|
}, {
|
||||||
mixedCaseData1,
|
mixedCaseData1,
|
||||||
@ -351,48 +347,14 @@ func unpackTestEventData(dest interface{}, hexData string, jsonEvent []byte, ass
|
|||||||
var e Event
|
var e Event
|
||||||
assert.NoError(json.Unmarshal(jsonEvent, &e), "Should be able to unmarshal event ABI")
|
assert.NoError(json.Unmarshal(jsonEvent, &e), "Should be able to unmarshal event ABI")
|
||||||
a := ABI{Events: map[string]Event{"e": e}}
|
a := ABI{Events: map[string]Event{"e": e}}
|
||||||
return a.Unpack(dest, "e", data)
|
return a.UnpackIntoInterface(dest, "e", data)
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
Taken from
|
|
||||||
https://github.com/ethereum/go-ethereum/pull/15568
|
|
||||||
*/
|
|
||||||
|
|
||||||
type testResult struct {
|
|
||||||
Values [2]*big.Int
|
|
||||||
Value1 *big.Int
|
|
||||||
Value2 *big.Int
|
|
||||||
}
|
|
||||||
|
|
||||||
type testCase struct {
|
|
||||||
definition string
|
|
||||||
want testResult
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tc testCase) encoded(intType, arrayType Type) []byte {
|
|
||||||
var b bytes.Buffer
|
|
||||||
if tc.want.Value1 != nil {
|
|
||||||
val, _ := intType.pack(reflect.ValueOf(tc.want.Value1))
|
|
||||||
b.Write(val)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !reflect.DeepEqual(tc.want.Values, [2]*big.Int{nil, nil}) {
|
|
||||||
val, _ := arrayType.pack(reflect.ValueOf(tc.want.Values))
|
|
||||||
b.Write(val)
|
|
||||||
}
|
|
||||||
if tc.want.Value2 != nil {
|
|
||||||
val, _ := intType.pack(reflect.ValueOf(tc.want.Value2))
|
|
||||||
b.Write(val)
|
|
||||||
}
|
|
||||||
return b.Bytes()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestEventUnpackIndexed verifies that indexed field will be skipped by event decoder.
|
// TestEventUnpackIndexed verifies that indexed field will be skipped by event decoder.
|
||||||
func TestEventUnpackIndexed(t *testing.T) {
|
func TestEventUnpackIndexed(t *testing.T) {
|
||||||
definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": true, "name":"value1", "type":"uint8"},{"indexed": false, "name":"value2", "type":"uint8"}]}]`
|
definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": true, "name":"value1", "type":"uint8"},{"indexed": false, "name":"value2", "type":"uint8"}]}]`
|
||||||
type testStruct struct {
|
type testStruct struct {
|
||||||
Value1 uint8
|
Value1 uint8 // indexed
|
||||||
Value2 uint8
|
Value2 uint8
|
||||||
}
|
}
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
@ -400,16 +362,16 @@ func TestEventUnpackIndexed(t *testing.T) {
|
|||||||
var b bytes.Buffer
|
var b bytes.Buffer
|
||||||
b.Write(packNum(reflect.ValueOf(uint8(8))))
|
b.Write(packNum(reflect.ValueOf(uint8(8))))
|
||||||
var rst testStruct
|
var rst testStruct
|
||||||
require.NoError(t, abi.Unpack(&rst, "test", b.Bytes()))
|
require.NoError(t, abi.UnpackIntoInterface(&rst, "test", b.Bytes()))
|
||||||
require.Equal(t, uint8(0), rst.Value1)
|
require.Equal(t, uint8(0), rst.Value1)
|
||||||
require.Equal(t, uint8(8), rst.Value2)
|
require.Equal(t, uint8(8), rst.Value2)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestEventIndexedWithArrayUnpack verifies that decoder will not overlow when static array is indexed input.
|
// TestEventIndexedWithArrayUnpack verifies that decoder will not overflow when static array is indexed input.
|
||||||
func TestEventIndexedWithArrayUnpack(t *testing.T) {
|
func TestEventIndexedWithArrayUnpack(t *testing.T) {
|
||||||
definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": true, "name":"value1", "type":"uint8[2]"},{"indexed": false, "name":"value2", "type":"string"}]}]`
|
definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": true, "name":"value1", "type":"uint8[2]"},{"indexed": false, "name":"value2", "type":"string"}]}]`
|
||||||
type testStruct struct {
|
type testStruct struct {
|
||||||
Value1 [2]uint8
|
Value1 [2]uint8 // indexed
|
||||||
Value2 string
|
Value2 string
|
||||||
}
|
}
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
@ -422,7 +384,7 @@ func TestEventIndexedWithArrayUnpack(t *testing.T) {
|
|||||||
b.Write(common.RightPadBytes([]byte(stringOut), 32))
|
b.Write(common.RightPadBytes([]byte(stringOut), 32))
|
||||||
|
|
||||||
var rst testStruct
|
var rst testStruct
|
||||||
require.NoError(t, abi.Unpack(&rst, "test", b.Bytes()))
|
require.NoError(t, abi.UnpackIntoInterface(&rst, "test", b.Bytes()))
|
||||||
require.Equal(t, [2]uint8{0, 0}, rst.Value1)
|
require.Equal(t, [2]uint8{0, 0}, rst.Value1)
|
||||||
require.Equal(t, stringOut, rst.Value2)
|
require.Equal(t, stringOut, rst.Value2)
|
||||||
}
|
}
|
||||||
|
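These test updates track the API split into Unpack, which returns the values positionally, and UnpackIntoInterface, which fills a struct by field name. A minimal sketch of both call styles against a toy event definition; the JSON and the encoded data below are illustrative only:

package main

import (
	"fmt"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

func main() {
	const def = `[{"name":"test","type":"event","inputs":[
		{"indexed":false,"name":"value1","type":"uint8"},
		{"indexed":false,"name":"value2","type":"uint8"}]}]`

	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		panic(err)
	}
	// Two ABI-encoded 32-byte words holding 1 and 2.
	data := common.Hex2Bytes(
		"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002")

	// Positional form: a []interface{} in declaration order.
	vals, _ := parsed.Unpack("test", data)
	fmt.Println(vals) // [1 2]

	// Struct form: fields matched by name.
	var out struct {
		Value1 uint8
		Value2 uint8
	}
	_ = parsed.UnpackIntoInterface(&out, "test", data)
	fmt.Println(out.Value1, out.Value2) // 1 2
}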
@@ -23,11 +23,29 @@ import (
 	"github.com/ethereum/go-ethereum/crypto"
 )

+// FunctionType represents different types of functions a contract might have.
+type FunctionType int
+
+const (
+	// Constructor represents the constructor of the contract.
+	// The constructor function is called while deploying a contract.
+	Constructor FunctionType = iota
+	// Fallback represents the fallback function.
+	// This function is executed if no other function matches the given function
+	// signature and no receive function is specified.
+	Fallback
+	// Receive represents the receive function.
+	// This function is executed on plain Ether transfers.
+	Receive
+	// Function represents a normal function.
+	Function
+)
+
 // Method represents a callable given a `Name` and whether the method is a constant.
 // If the method is `Const` no transaction needs to be created for this
 // particular Method call. It can easily be simulated using a local VM.
 // For example a `Balance()` method only needs to retrieve something
-// from the storage and therefore requires no Tx to be send to the
+// from the storage and therefore requires no Tx to be sent to the
 // network. A method such as `Transact` does require a Tx and thus will
 // be flagged `false`.
 // Input specifies the required input parameters for this gives method.
@ -36,55 +54,114 @@ type Method struct {
|
|||||||
// the raw name and a suffix will be added in the case of a function overload.
|
// the raw name and a suffix will be added in the case of a function overload.
|
||||||
//
|
//
|
||||||
// e.g.
|
// e.g.
|
||||||
// There are two functions have same name:
|
// These are two functions that have the same name:
|
||||||
// * foo(int,int)
|
// * foo(int,int)
|
||||||
// * foo(uint,uint)
|
// * foo(uint,uint)
|
||||||
// The method name of the first one will be resolved as foo while the second one
|
// The method name of the first one will be resolved as foo while the second one
|
||||||
// will be resolved as foo0.
|
// will be resolved as foo0.
|
||||||
Name string
|
Name string
|
||||||
// RawName is the raw method name parsed from ABI.
|
RawName string // RawName is the raw method name parsed from ABI
|
||||||
RawName string
|
|
||||||
Const bool
|
// Type indicates whether the method is a
|
||||||
|
// special fallback introduced in solidity v0.6.0
|
||||||
|
Type FunctionType
|
||||||
|
|
||||||
|
// StateMutability indicates the mutability state of method,
|
||||||
|
// the default value is nonpayable. It can be empty if the abi
|
||||||
|
// is generated by legacy compiler.
|
||||||
|
StateMutability string
|
||||||
|
|
||||||
|
// Legacy indicators generated by compiler before v0.6.0
|
||||||
|
Constant bool
|
||||||
|
Payable bool
|
||||||
|
|
||||||
Inputs Arguments
|
Inputs Arguments
|
||||||
Outputs Arguments
|
Outputs Arguments
|
||||||
|
str string
|
||||||
|
// Sig returns the methods string signature according to the ABI spec.
|
||||||
|
// e.g. function foo(uint32 a, int b) = "foo(uint32,int256)"
|
||||||
|
// Please note that "int" is substitute for its canonical representation "int256"
|
||||||
|
Sig string
|
||||||
|
// ID returns the canonical representation of the method's signature used by the
|
||||||
|
// abi definition to identify method names and types.
|
||||||
|
ID []byte
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sig returns the methods string signature according to the ABI spec.
|
// NewMethod creates a new Method.
|
||||||
//
|
// A method should always be created using NewMethod.
|
||||||
// Example
|
// It also precomputes the sig representation and the string representation
|
||||||
//
|
// of the method.
|
||||||
// function foo(uint32 a, int b) = "foo(uint32,int256)"
|
func NewMethod(name string, rawName string, funType FunctionType, mutability string, isConst, isPayable bool, inputs Arguments, outputs Arguments) Method {
|
||||||
//
|
var (
|
||||||
// Please note that "int" is substitute for its canonical representation "int256"
|
types = make([]string, len(inputs))
|
||||||
func (method Method) Sig() string {
|
inputNames = make([]string, len(inputs))
|
||||||
types := make([]string, len(method.Inputs))
|
outputNames = make([]string, len(outputs))
|
||||||
for i, input := range method.Inputs {
|
)
|
||||||
|
for i, input := range inputs {
|
||||||
|
inputNames[i] = fmt.Sprintf("%v %v", input.Type, input.Name)
|
||||||
types[i] = input.Type.String()
|
types[i] = input.Type.String()
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("%v(%v)", method.RawName, strings.Join(types, ","))
|
for i, output := range outputs {
|
||||||
|
outputNames[i] = output.Type.String()
|
||||||
|
if len(output.Name) > 0 {
|
||||||
|
outputNames[i] += fmt.Sprintf(" %v", output.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// calculate the signature and method id. Note only function
|
||||||
|
// has meaningful signature and id.
|
||||||
|
var (
|
||||||
|
sig string
|
||||||
|
id []byte
|
||||||
|
)
|
||||||
|
if funType == Function {
|
||||||
|
sig = fmt.Sprintf("%v(%v)", rawName, strings.Join(types, ","))
|
||||||
|
id = crypto.Keccak256([]byte(sig))[:4]
|
||||||
|
}
|
||||||
|
// Extract meaningful state mutability of solidity method.
|
||||||
|
// If it's default value, never print it.
|
||||||
|
state := mutability
|
||||||
|
if state == "nonpayable" {
|
||||||
|
state = ""
|
||||||
|
}
|
||||||
|
if state != "" {
|
||||||
|
state = state + " "
|
||||||
|
}
|
||||||
|
identity := fmt.Sprintf("function %v", rawName)
|
||||||
|
if funType == Fallback {
|
||||||
|
identity = "fallback"
|
||||||
|
} else if funType == Receive {
|
||||||
|
identity = "receive"
|
||||||
|
} else if funType == Constructor {
|
||||||
|
identity = "constructor"
|
||||||
|
}
|
||||||
|
str := fmt.Sprintf("%v(%v) %sreturns(%v)", identity, strings.Join(inputNames, ", "), state, strings.Join(outputNames, ", "))
|
||||||
|
|
||||||
|
return Method{
|
||||||
|
Name: name,
|
||||||
|
RawName: rawName,
|
||||||
|
Type: funType,
|
||||||
|
StateMutability: mutability,
|
||||||
|
Constant: isConst,
|
||||||
|
Payable: isPayable,
|
||||||
|
Inputs: inputs,
|
||||||
|
Outputs: outputs,
|
||||||
|
str: str,
|
||||||
|
Sig: sig,
|
||||||
|
ID: id,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (method Method) String() string {
|
func (method Method) String() string {
|
||||||
inputs := make([]string, len(method.Inputs))
|
return method.str
|
||||||
for i, input := range method.Inputs {
|
|
||||||
inputs[i] = fmt.Sprintf("%v %v", input.Type, input.Name)
|
|
||||||
}
|
|
||||||
outputs := make([]string, len(method.Outputs))
|
|
||||||
for i, output := range method.Outputs {
|
|
||||||
outputs[i] = output.Type.String()
|
|
||||||
if len(output.Name) > 0 {
|
|
||||||
outputs[i] += fmt.Sprintf(" %v", output.Name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
constant := ""
|
|
||||||
if method.Const {
|
|
||||||
constant = "constant "
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("function %v(%v) %sreturns(%v)", method.RawName, strings.Join(inputs, ", "), constant, strings.Join(outputs, ", "))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ID returns the canonical representation of the method's signature used by the
|
// IsConstant returns the indicator whether the method is read-only.
|
||||||
// abi definition to identify method names and types.
|
func (method Method) IsConstant() bool {
|
||||||
func (method Method) ID() []byte {
|
return method.StateMutability == "view" || method.StateMutability == "pure" || method.Constant
|
||||||
return crypto.Keccak256([]byte(method.Sig()))[:4]
|
}
|
||||||
|
|
||||||
|
// IsPayable returns the indicator whether the method can process
|
||||||
|
// plain ether transfers.
|
||||||
|
func (method Method) IsPayable() bool {
|
||||||
|
return method.StateMutability == "payable" || method.Payable
|
||||||
}
|
}
|
||||||
|
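With Sig and ID now plain fields, callers read them straight off the parsed ABI instead of calling methods. A brief sketch assuming the Method changes above; the ABI JSON is illustrative:

package main

import (
	"fmt"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	const def = `[{"type":"function","name":"transfer","stateMutability":"nonpayable",
		"inputs":[{"name":"to","type":"address"},{"name":"amount","type":"uint256"}],
		"outputs":[{"name":"ok","type":"bool"}]}]`

	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		panic(err)
	}
	m := parsed.Methods["transfer"]

	fmt.Println(m.Sig)       // transfer(address,uint256)
	fmt.Printf("%x\n", m.ID) // 4-byte selector, keccak256(Sig)[:4]
	fmt.Println(m)           // function transfer(address to, uint256 amount) returns(bool ok)
}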
@ -23,13 +23,15 @@ import (
|
|||||||
|
|
||||||
const methoddata = `
|
const methoddata = `
|
||||||
[
|
[
|
||||||
{"type": "function", "name": "balance", "constant": true },
|
{"type": "function", "name": "balance", "stateMutability": "view"},
|
||||||
{"type": "function", "name": "send", "constant": false, "inputs": [{ "name": "amount", "type": "uint256" }]},
|
{"type": "function", "name": "send", "inputs": [{ "name": "amount", "type": "uint256" }]},
|
||||||
{"type": "function", "name": "transfer", "constant": false, "inputs": [{"name": "from", "type": "address"}, {"name": "to", "type": "address"}, {"name": "value", "type": "uint256"}], "outputs": [{"name": "success", "type": "bool"}]},
|
{"type": "function", "name": "transfer", "inputs": [{"name": "from", "type": "address"}, {"name": "to", "type": "address"}, {"name": "value", "type": "uint256"}], "outputs": [{"name": "success", "type": "bool"}]},
|
||||||
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple"}],"name":"tuple","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple"}],"name":"tuple","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
||||||
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple[]"}],"name":"tupleSlice","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple[]"}],"name":"tupleSlice","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
||||||
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple[5]"}],"name":"tupleArray","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple[5]"}],"name":"tupleArray","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
||||||
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple[5][]"}],"name":"complexTuple","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"}
|
{"constant":false,"inputs":[{"components":[{"name":"x","type":"uint256"},{"name":"y","type":"uint256"}],"name":"a","type":"tuple[5][]"}],"name":"complexTuple","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},
|
||||||
|
{"stateMutability":"nonpayable","type":"fallback"},
|
||||||
|
{"stateMutability":"payable","type":"receive"}
|
||||||
]`
|
]`
|
||||||
|
|
||||||
func TestMethodString(t *testing.T) {
|
func TestMethodString(t *testing.T) {
|
||||||
@ -39,7 +41,7 @@ func TestMethodString(t *testing.T) {
|
|||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
method: "balance",
|
method: "balance",
|
||||||
expectation: "function balance() constant returns()",
|
expectation: "function balance() view returns()",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
method: "send",
|
method: "send",
|
||||||
@ -65,6 +67,14 @@ func TestMethodString(t *testing.T) {
|
|||||||
method: "complexTuple",
|
method: "complexTuple",
|
||||||
expectation: "function complexTuple((uint256,uint256)[5][] a) returns()",
|
expectation: "function complexTuple((uint256,uint256)[5][] a) returns()",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
method: "fallback",
|
||||||
|
expectation: "fallback() returns()",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
method: "receive",
|
||||||
|
expectation: "receive() payable returns()",
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
abi, err := JSON(strings.NewReader(methoddata))
|
abi, err := JSON(strings.NewReader(methoddata))
|
||||||
@ -73,7 +83,14 @@ func TestMethodString(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range table {
|
for _, test := range table {
|
||||||
got := abi.Methods[test.method].String()
|
var got string
|
||||||
|
if test.method == "fallback" {
|
||||||
|
got = abi.Fallback.String()
|
||||||
|
} else if test.method == "receive" {
|
||||||
|
got = abi.Receive.String()
|
||||||
|
} else {
|
||||||
|
got = abi.Methods[test.method].String()
|
||||||
|
}
|
||||||
if got != test.expectation {
|
if got != test.expectation {
|
||||||
t.Errorf("expected string to be %s, got %s", test.expectation, got)
|
t.Errorf("expected string to be %s, got %s", test.expectation, got)
|
||||||
}
|
}
|
||||||
@@ -120,7 +137,7 @@ func TestMethodSig(t *testing.T) {
 	}

 	for _, test := range cases {
-		got := abi.Methods[test.method].Sig()
+		got := abi.Methods[test.method].Sig
 		if got != test.expect {
 			t.Errorf("expected string to be %s, got %s", test.expect, got)
 		}
@ -1,44 +0,0 @@
|
|||||||
// Copyright 2015 The go-ethereum Authors
|
|
||||||
// This file is part of the go-ethereum library.
|
|
||||||
//
|
|
||||||
// The go-ethereum library is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Lesser General Public License as published by
|
|
||||||
// the Free Software Foundation, either version 3 of the License, or
|
|
||||||
// (at your option) any later version.
|
|
||||||
//
|
|
||||||
// The go-ethereum library is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Lesser General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Lesser General Public License
|
|
||||||
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
package abi
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math/big"
|
|
||||||
"reflect"
|
|
||||||
|
|
||||||
"github.com/ethereum/go-ethereum/common"
|
|
||||||
"github.com/ethereum/go-ethereum/common/math"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
bigT = reflect.TypeOf(&big.Int{})
|
|
||||||
derefbigT = reflect.TypeOf(big.Int{})
|
|
||||||
uint8T = reflect.TypeOf(uint8(0))
|
|
||||||
uint16T = reflect.TypeOf(uint16(0))
|
|
||||||
uint32T = reflect.TypeOf(uint32(0))
|
|
||||||
uint64T = reflect.TypeOf(uint64(0))
|
|
||||||
int8T = reflect.TypeOf(int8(0))
|
|
||||||
int16T = reflect.TypeOf(int16(0))
|
|
||||||
int32T = reflect.TypeOf(int32(0))
|
|
||||||
int64T = reflect.TypeOf(int64(0))
|
|
||||||
addressT = reflect.TypeOf(common.Address{})
|
|
||||||
)
|
|
||||||
|
|
||||||
// U256 converts a big Int into a 256bit EVM number.
|
|
||||||
func U256(n *big.Int) []byte {
|
|
||||||
return math.PaddedBigBytes(math.U256(n), 32)
|
|
||||||
}
|
|
@ -1,33 +0,0 @@
|
|||||||
// Copyright 2015 The go-ethereum Authors
|
|
||||||
// This file is part of the go-ethereum library.
|
|
||||||
//
|
|
||||||
// The go-ethereum library is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU Lesser General Public License as published by
|
|
||||||
// the Free Software Foundation, either version 3 of the License, or
|
|
||||||
// (at your option) any later version.
|
|
||||||
//
|
|
||||||
// The go-ethereum library is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU Lesser General Public License for more details.
|
|
||||||
//
|
|
||||||
// You should have received a copy of the GNU Lesser General Public License
|
|
||||||
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
package abi
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"math/big"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestNumberTypes(t *testing.T) {
|
|
||||||
ubytes := make([]byte, 32)
|
|
||||||
ubytes[31] = 1
|
|
||||||
|
|
||||||
unsigned := U256(big.NewInt(1))
|
|
||||||
if !bytes.Equal(unsigned, ubytes) {
|
|
||||||
t.Errorf("expected %x got %x", ubytes, unsigned)
|
|
||||||
}
|
|
||||||
}
|
|
@@ -17,6 +17,8 @@
 package abi

 import (
+	"errors"
+	"fmt"
 	"math/big"
 	"reflect"

@@ -25,7 +27,7 @@ import (
 )

 // packBytesSlice packs the given bytes as [L, V] as the canonical representation
-// bytes slice
+// bytes slice.
 func packBytesSlice(bytes []byte, l int) []byte {
 	len := packNum(reflect.ValueOf(l))
 	return append(len, common.RightPadBytes(bytes, (l+31)/32*32)...)
@@ -33,49 +35,51 @@ func packBytesSlice(bytes []byte, l int) []byte {

 // packElement packs the given reflect value according to the abi specification in
 // t.
-func packElement(t Type, reflectValue reflect.Value) []byte {
+func packElement(t Type, reflectValue reflect.Value) ([]byte, error) {
 	switch t.T {
 	case IntTy, UintTy:
-		return packNum(reflectValue)
+		return packNum(reflectValue), nil
 	case StringTy:
-		return packBytesSlice([]byte(reflectValue.String()), reflectValue.Len())
+		return packBytesSlice([]byte(reflectValue.String()), reflectValue.Len()), nil
 	case AddressTy:
 		if reflectValue.Kind() == reflect.Array {
 			reflectValue = mustArrayToByteSlice(reflectValue)
 		}

-		return common.LeftPadBytes(reflectValue.Bytes(), 32)
+		return common.LeftPadBytes(reflectValue.Bytes(), 32), nil
 	case BoolTy:
 		if reflectValue.Bool() {
-			return math.PaddedBigBytes(common.Big1, 32)
+			return math.PaddedBigBytes(common.Big1, 32), nil
 		}
-		return math.PaddedBigBytes(common.Big0, 32)
+		return math.PaddedBigBytes(common.Big0, 32), nil
 	case BytesTy:
 		if reflectValue.Kind() == reflect.Array {
 			reflectValue = mustArrayToByteSlice(reflectValue)
 		}
-		return packBytesSlice(reflectValue.Bytes(), reflectValue.Len())
+		if reflectValue.Type() != reflect.TypeOf([]byte{}) {
+			return []byte{}, errors.New("Bytes type is neither slice nor array")
+		}
+		return packBytesSlice(reflectValue.Bytes(), reflectValue.Len()), nil
 	case FixedBytesTy, FunctionTy:
 		if reflectValue.Kind() == reflect.Array {
 			reflectValue = mustArrayToByteSlice(reflectValue)
 		}
-		return common.RightPadBytes(reflectValue.Bytes(), 32)
+		return common.RightPadBytes(reflectValue.Bytes(), 32), nil
 	default:
-		panic("abi: fatal error")
+		return []byte{}, fmt.Errorf("Could not pack element, unknown type: %v", t.T)
 	}
 }

-// packNum packs the given number (using the reflect value) and will cast it to appropriate number representation
+// packNum packs the given number (using the reflect value) and will cast it to appropriate number representation.
 func packNum(value reflect.Value) []byte {
 	switch kind := value.Kind(); kind {
 	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
-		return U256(new(big.Int).SetUint64(value.Uint()))
+		return math.U256Bytes(new(big.Int).SetUint64(value.Uint()))
 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		return U256(big.NewInt(value.Int()))
+		return math.U256Bytes(big.NewInt(value.Int()))
 	case reflect.Ptr:
-		return U256(value.Interface().(*big.Int))
+		return math.U256Bytes(new(big.Int).Set(value.Interface().(*big.Int)))
 	default:
 		panic("abi: fatal error")
 	}

 }
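packNum now funnels every integer kind through math.U256Bytes, which produces the 32-byte two's-complement form the EVM expects. A tiny standalone sketch of that helper:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common/math"
)

func main() {
	// Non-negative values are simply left-padded to 32 bytes...
	fmt.Printf("%x\n", math.U256Bytes(big.NewInt(1)))
	// ...while negative values wrap modulo 2^256 (all 0xff bytes for -1).
	fmt.Printf("%x\n", math.U256Bytes(big.NewInt(-1)))
}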
@ -18,623 +18,51 @@ package abi
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
"math/big"
|
"math/big"
|
||||||
"reflect"
|
"reflect"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// TestPack tests the general pack/unpack tests in packing_test.go
|
||||||
func TestPack(t *testing.T) {
|
func TestPack(t *testing.T) {
|
||||||
for i, test := range []struct {
|
for i, test := range packUnpackTests {
|
||||||
typ string
|
t.Run(strconv.Itoa(i), func(t *testing.T) {
|
||||||
components []ArgumentMarshaling
|
encb, err := hex.DecodeString(test.packed)
|
||||||
input interface{}
|
if err != nil {
|
||||||
output []byte
|
t.Fatalf("invalid hex %s: %v", test.packed, err)
|
||||||
}{
|
}
|
||||||
{
|
inDef := fmt.Sprintf(`[{ "name" : "method", "type": "function", "inputs": %s}]`, test.def)
|
||||||
"uint8",
|
inAbi, err := JSON(strings.NewReader(inDef))
|
||||||
nil,
|
if err != nil {
|
||||||
uint8(2),
|
t.Fatalf("invalid ABI definition %s, %v", inDef, err)
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
}
|
||||||
},
|
var packed []byte
|
||||||
{
|
packed, err = inAbi.Pack("method", test.unpacked)
|
||||||
"uint8[]",
|
|
||||||
nil,
|
|
||||||
[]uint8{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint16",
|
|
||||||
nil,
|
|
||||||
uint16(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint16[]",
|
|
||||||
nil,
|
|
||||||
[]uint16{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint32",
|
|
||||||
nil,
|
|
||||||
uint32(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint32[]",
|
|
||||||
nil,
|
|
||||||
[]uint32{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint64",
|
|
||||||
nil,
|
|
||||||
uint64(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint64[]",
|
|
||||||
nil,
|
|
||||||
[]uint64{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint256",
|
|
||||||
nil,
|
|
||||||
big.NewInt(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint256[]",
|
|
||||||
nil,
|
|
||||||
[]*big.Int{big.NewInt(1), big.NewInt(2)},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int8",
|
|
||||||
nil,
|
|
||||||
int8(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int8[]",
|
|
||||||
nil,
|
|
||||||
[]int8{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int16",
|
|
||||||
nil,
|
|
||||||
int16(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int16[]",
|
|
||||||
nil,
|
|
||||||
[]int16{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int32",
|
|
||||||
nil,
|
|
||||||
int32(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int32[]",
|
|
||||||
nil,
|
|
||||||
[]int32{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int64",
|
|
||||||
nil,
|
|
||||||
int64(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int64[]",
|
|
||||||
nil,
|
|
||||||
[]int64{1, 2},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int256",
|
|
||||||
nil,
|
|
||||||
big.NewInt(2),
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"int256[]",
|
|
||||||
nil,
|
|
||||||
[]*big.Int{big.NewInt(1), big.NewInt(2)},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes1",
|
|
||||||
nil,
|
|
||||||
[1]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes2",
|
|
||||||
nil,
|
|
||||||
[2]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes3",
|
|
||||||
nil,
|
|
||||||
[3]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes4",
|
|
||||||
nil,
|
|
||||||
[4]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes5",
|
|
||||||
nil,
|
|
||||||
[5]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes6",
|
|
||||||
nil,
|
|
||||||
[6]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes7",
|
|
||||||
nil,
|
|
||||||
[7]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes8",
|
|
||||||
nil,
|
|
||||||
[8]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes9",
|
|
||||||
nil,
|
|
||||||
[9]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes10",
|
|
||||||
nil,
|
|
||||||
[10]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes11",
|
|
||||||
nil,
|
|
||||||
[11]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes12",
|
|
||||||
nil,
|
|
||||||
[12]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes13",
|
|
||||||
nil,
|
|
||||||
[13]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes14",
|
|
||||||
nil,
|
|
||||||
[14]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes15",
|
|
||||||
nil,
|
|
||||||
[15]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes16",
|
|
||||||
nil,
|
|
||||||
[16]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes17",
|
|
||||||
nil,
|
|
||||||
[17]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes18",
|
|
||||||
nil,
|
|
||||||
[18]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes19",
|
|
||||||
nil,
|
|
||||||
[19]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes20",
|
|
||||||
nil,
|
|
||||||
[20]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes21",
|
|
||||||
nil,
|
|
||||||
[21]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes22",
|
|
||||||
nil,
|
|
||||||
[22]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes23",
|
|
||||||
nil,
|
|
||||||
[23]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes24",
|
|
||||||
nil,
|
|
||||||
[24]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes25",
|
|
||||||
nil,
|
|
||||||
[25]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes26",
|
|
||||||
nil,
|
|
||||||
[26]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes27",
|
|
||||||
nil,
|
|
||||||
[27]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes28",
|
|
||||||
nil,
|
|
||||||
[28]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes29",
|
|
||||||
nil,
|
|
||||||
[29]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes30",
|
|
||||||
nil,
|
|
||||||
[30]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes31",
|
|
||||||
nil,
|
|
||||||
[31]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes32",
|
|
||||||
nil,
|
|
||||||
[32]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"uint32[2][3][4]",
|
|
||||||
nil,
|
|
||||||
[4][3][2]uint32{{{1, 2}, {3, 4}, {5, 6}}, {{7, 8}, {9, 10}, {11, 12}}, {{13, 14}, {15, 16}, {17, 18}}, {{19, 20}, {21, 22}, {23, 24}}},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000d000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000f000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001300000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000015000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000170000000000000000000000000000000000000000000000000000000000000018"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address[]",
|
|
||||||
nil,
|
|
||||||
[]common.Address{{1}, {2}},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000200000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes32[]",
|
|
||||||
nil,
|
|
||||||
[]common.Hash{{1}, {2}},
|
|
||||||
common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000201000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"function",
|
|
||||||
nil,
|
|
||||||
[24]byte{1},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"string",
|
|
||||||
nil,
|
|
||||||
"foobar",
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000006666f6f6261720000000000000000000000000000000000000000000000000000"),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"string[]",
|
|
||||||
nil,
|
|
||||||
[]string{"hello", "foobar"},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002" + // len(array) = 2
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to i = 0
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000080" + // offset 128 to i = 1
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000005" + // len(str[0]) = 5
|
|
||||||
"68656c6c6f000000000000000000000000000000000000000000000000000000" + // str[0]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000006" + // len(str[1]) = 6
|
|
||||||
"666f6f6261720000000000000000000000000000000000000000000000000000"), // str[1]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"string[2]",
|
|
||||||
nil,
|
|
||||||
[]string{"hello", "foobar"},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000040" + // offset to i = 0
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000080" + // offset to i = 1
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000005" + // len(str[0]) = 5
|
|
||||||
"68656c6c6f000000000000000000000000000000000000000000000000000000" + // str[0]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000006" + // len(str[1]) = 6
|
|
||||||
"666f6f6261720000000000000000000000000000000000000000000000000000"), // str[1]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bytes32[][]",
|
|
||||||
nil,
|
|
||||||
[][]common.Hash{{{1}, {2}}, {{3}, {4}, {5}}},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002" + // len(array) = 2
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to i = 0
|
|
||||||
"00000000000000000000000000000000000000000000000000000000000000a0" + // offset 160 to i = 1
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // len(array[0]) = 2
|
|
||||||
"0100000000000000000000000000000000000000000000000000000000000000" + // array[0][0]
|
|
||||||
"0200000000000000000000000000000000000000000000000000000000000000" + // array[0][1]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000003" + // len(array[1]) = 3
|
|
||||||
"0300000000000000000000000000000000000000000000000000000000000000" + // array[1][0]
|
|
||||||
"0400000000000000000000000000000000000000000000000000000000000000" + // array[1][1]
|
|
||||||
"0500000000000000000000000000000000000000000000000000000000000000"), // array[1][2]
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
if err != nil {
|
||||||
"bytes32[][2]",
|
t.Fatalf("test %d (%v) failed: %v", i, test.def, err)
|
||||||
nil,
|
}
|
||||||
[][]common.Hash{{{1}, {2}}, {{3}, {4}, {5}}},
|
if !reflect.DeepEqual(packed[4:], encb) {
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to i = 0
|
t.Errorf("test %d (%v) failed: expected %v, got %v", i, test.def, encb, packed[4:])
|
||||||
"00000000000000000000000000000000000000000000000000000000000000a0" + // offset 160 to i = 1
|
}
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // len(array[0]) = 2
|
})
|
||||||
"0100000000000000000000000000000000000000000000000000000000000000" + // array[0][0]
|
|
||||||
"0200000000000000000000000000000000000000000000000000000000000000" + // array[0][1]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000003" + // len(array[1]) = 3
|
|
||||||
"0300000000000000000000000000000000000000000000000000000000000000" + // array[1][0]
|
|
||||||
"0400000000000000000000000000000000000000000000000000000000000000" + // array[1][1]
|
|
||||||
"0500000000000000000000000000000000000000000000000000000000000000"), // array[1][2]
|
|
||||||
},
|
|
||||||
|
|
||||||
{
|
|
||||||
"bytes32[3][2]",
|
|
||||||
nil,
|
|
||||||
[][]common.Hash{{{1}, {2}, {3}}, {{3}, {4}, {5}}},
|
|
||||||
common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000" + // array[0][0]
|
|
||||||
"0200000000000000000000000000000000000000000000000000000000000000" + // array[0][1]
|
|
||||||
"0300000000000000000000000000000000000000000000000000000000000000" + // array[0][2]
|
|
||||||
"0300000000000000000000000000000000000000000000000000000000000000" + // array[1][0]
|
|
||||||
"0400000000000000000000000000000000000000000000000000000000000000" + // array[1][1]
|
|
||||||
"0500000000000000000000000000000000000000000000000000000000000000"), // array[1][2]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// static tuple
|
|
||||||
"tuple",
|
|
||||||
[]ArgumentMarshaling{
|
|
||||||
{Name: "a", Type: "int64"},
|
|
||||||
{Name: "b", Type: "int256"},
|
|
||||||
{Name: "c", Type: "int256"},
|
|
||||||
{Name: "d", Type: "bool"},
|
|
||||||
{Name: "e", Type: "bytes32[3][2]"},
|
|
||||||
},
|
|
||||||
struct {
|
|
||||||
A int64
|
|
||||||
B *big.Int
|
|
||||||
C *big.Int
|
|
||||||
D bool
|
|
||||||
E [][]common.Hash
|
|
||||||
}{1, big.NewInt(1), big.NewInt(-1), true, [][]common.Hash{{{1}, {2}, {3}}, {{3}, {4}, {5}}}},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000001" + // struct[a]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[b]
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // struct[c]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[d]
|
|
||||||
"0100000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[0][0]
|
|
||||||
"0200000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[0][1]
|
|
||||||
"0300000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[0][2]
|
|
||||||
"0300000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[1][0]
|
|
||||||
"0400000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[1][1]
|
|
||||||
"0500000000000000000000000000000000000000000000000000000000000000"), // struct[e] array[1][2]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// dynamic tuple
|
|
||||||
"tuple",
|
|
||||||
[]ArgumentMarshaling{
|
|
||||||
{Name: "a", Type: "string"},
|
|
||||||
{Name: "b", Type: "int64"},
|
|
||||||
{Name: "c", Type: "bytes"},
|
|
||||||
{Name: "d", Type: "string[]"},
|
|
||||||
{Name: "e", Type: "int256[]"},
|
|
||||||
{Name: "f", Type: "address[]"},
|
|
||||||
},
|
|
||||||
struct {
|
|
||||||
FieldA string `abi:"a"` // Test whether abi tag works
|
|
||||||
FieldB int64 `abi:"b"`
|
|
||||||
C []byte
|
|
||||||
D []string
|
|
||||||
E []*big.Int
|
|
||||||
F []common.Address
|
|
||||||
}{"foobar", 1, []byte{1}, []string{"foo", "bar"}, []*big.Int{big.NewInt(1), big.NewInt(-1)}, []common.Address{{1}, {2}}},
|
|
||||||
common.Hex2Bytes("00000000000000000000000000000000000000000000000000000000000000c0" + // struct[a] offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[b]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000100" + // struct[c] offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000140" + // struct[d] offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000220" + // struct[e] offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000280" + // struct[f] offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000006" + // struct[a] length
|
|
||||||
"666f6f6261720000000000000000000000000000000000000000000000000000" + // struct[a] "foobar"
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[c] length
|
|
||||||
"0100000000000000000000000000000000000000000000000000000000000000" + // []byte{1}
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // struct[d] length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // foo offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000080" + // bar offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000003" + // foo length
|
|
||||||
"666f6f0000000000000000000000000000000000000000000000000000000000" + // foo
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000003" + // bar offset
|
|
||||||
"6261720000000000000000000000000000000000000000000000000000000000" + // bar
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // struct[e] length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // 1
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // -1
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // struct[f] length
|
|
||||||
"0000000000000000000000000100000000000000000000000000000000000000" + // common.Address{1}
|
|
||||||
"0000000000000000000000000200000000000000000000000000000000000000"), // common.Address{2}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// nested tuple
|
|
||||||
"tuple",
|
|
||||||
[]ArgumentMarshaling{
|
|
||||||
{Name: "a", Type: "tuple", Components: []ArgumentMarshaling{{Name: "a", Type: "uint256"}, {Name: "b", Type: "uint256[]"}}},
|
|
||||||
{Name: "b", Type: "int256[]"},
|
|
||||||
},
|
|
||||||
struct {
|
|
||||||
A struct {
|
|
||||||
FieldA *big.Int `abi:"a"`
|
|
||||||
B []*big.Int
|
|
||||||
}
|
|
||||||
B []*big.Int
|
|
||||||
}{
|
|
||||||
A: struct {
|
|
||||||
FieldA *big.Int `abi:"a"` // Test whether abi tag works for nested tuple
|
|
||||||
B []*big.Int
|
|
||||||
}{big.NewInt(1), []*big.Int{big.NewInt(1), big.NewInt(0)}},
|
|
||||||
B: []*big.Int{big.NewInt(1), big.NewInt(0)}},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000040" + // a offset
|
|
||||||
"00000000000000000000000000000000000000000000000000000000000000e0" + // b offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // a.a value
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // a.b offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // a.b length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // a.b[0] value
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000000" + // a.b[1] value
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // b length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // b[0] value
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000000"), // b[1] value
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// tuple slice
|
|
||||||
"tuple[]",
|
|
||||||
[]ArgumentMarshaling{
|
|
||||||
{Name: "a", Type: "int256"},
|
|
||||||
{Name: "b", Type: "int256[]"},
|
|
||||||
},
|
|
||||||
[]struct {
|
|
||||||
A *big.Int
|
|
||||||
B []*big.Int
|
|
||||||
}{
|
|
||||||
{big.NewInt(-1), []*big.Int{big.NewInt(1), big.NewInt(0)}},
|
|
||||||
{big.NewInt(1), []*big.Int{big.NewInt(2), big.NewInt(-1)}},
|
|
||||||
},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000002" + // tuple length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[0] offset
|
|
||||||
"00000000000000000000000000000000000000000000000000000000000000e0" + // tuple[1] offset
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // tuple[0].A
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[0].B offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[0].B length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[0].B[0] value
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000000" + // tuple[0].B[1] value
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[1].A
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[1].B offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[1].B length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[1].B[0] value
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"), // tuple[1].B[1] value
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// static tuple array
|
|
||||||
"tuple[2]",
|
|
||||||
[]ArgumentMarshaling{
|
|
||||||
{Name: "a", Type: "int256"},
|
|
||||||
{Name: "b", Type: "int256"},
|
|
||||||
},
|
|
||||||
[2]struct {
|
|
||||||
A *big.Int
|
|
||||||
B *big.Int
|
|
||||||
}{
|
|
||||||
{big.NewInt(-1), big.NewInt(1)},
|
|
||||||
{big.NewInt(1), big.NewInt(-1)},
|
|
||||||
},
|
|
||||||
common.Hex2Bytes("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // tuple[0].a
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[0].b
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[1].a
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"), // tuple[1].b
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// dynamic tuple array
|
|
||||||
"tuple[2]",
|
|
||||||
[]ArgumentMarshaling{
|
|
||||||
{Name: "a", Type: "int256[]"},
|
|
||||||
},
|
|
||||||
[2]struct {
|
|
||||||
A []*big.Int
|
|
||||||
}{
|
|
||||||
{[]*big.Int{big.NewInt(-1), big.NewInt(1)}},
|
|
||||||
{[]*big.Int{big.NewInt(1), big.NewInt(-1)}},
|
|
||||||
},
|
|
||||||
common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000040" + // tuple[0] offset
|
|
||||||
"00000000000000000000000000000000000000000000000000000000000000c0" + // tuple[1] offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000020" + // tuple[0].A offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[0].A length
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // tuple[0].A[0]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[0].A[1]
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000020" + // tuple[1].A offset
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[1].A length
|
|
||||||
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[1].A[0]
|
|
||||||
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"), // tuple[1].A[1]
|
|
||||||
},
|
|
||||||
} {
|
|
||||||
typ, err := NewType(test.typ, "", test.components)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("%v failed. Unexpected parse error: %v", i, err)
|
|
||||||
}
|
|
||||||
output, err := typ.pack(reflect.ValueOf(test.input))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("%v failed. Unexpected pack error: %v", i, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !bytes.Equal(output, test.output) {
|
|
||||||
t.Errorf("input %d for typ: %v failed. Expected bytes: '%x' Got: '%x'", i, typ.String(), test.output, output)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
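For orientation, here is a minimal sketch of how the new table-driven TestPack plausibly fits together once the added fragments above are assembled into one function. Only the error checks and the packed[4:] comparison are taken from the hunks shown; the loop header, the hex decoding of test.packed and the wrapping of test.def into a one-method ABI definition are not visible in this excerpt and are assumptions.

    package abi

    import (
        "encoding/hex"
        "fmt"
        "reflect"
        "strconv"
        "strings"
        "testing"
    )

    // JSON and packUnpackTests come from the abi package itself; packUnpackTests
    // is the table defined in the new packing_test.go shown further below.
    func TestPackSketch(t *testing.T) {
        for i, test := range packUnpackTests {
            t.Run(strconv.Itoa(i), func(t *testing.T) {
                // Assumption: the expected payload is the hex-decoded "packed" column.
                encb, err := hex.DecodeString(test.packed)
                if err != nil {
                    t.Fatalf("invalid hex %s: %v", test.packed, err)
                }
                // Assumption: the "def" column is wrapped into a one-method ABI.
                inDef := fmt.Sprintf(`[{ "name" : "method", "type": "function", "inputs": %s}]`, test.def)
                inAbi, err := JSON(strings.NewReader(inDef))
                if err != nil {
                    t.Fatalf("invalid ABI definition %s, %v", inDef, err)
                }
                packed, err := inAbi.Pack("method", test.unpacked)
                if err != nil {
                    t.Fatalf("test %d (%v) failed: %v", i, test.def, err)
                }
                // The first 4 bytes of Pack's output are the method selector; only
                // the argument payload is compared against the fixture.
                if !reflect.DeepEqual(packed[4:], encb) {
                    t.Errorf("test %d (%v) failed: expected %v, got %v", i, test.def, encb, packed[4:])
                }
            })
        }
    }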
 func TestMethodPack(t *testing.T) {
-	abi, err := JSON(strings.NewReader(jsondata2))
+	abi, err := JSON(strings.NewReader(jsondata))
 	if err != nil {
 		t.Fatal(err)
 	}

-	sig := abi.Methods["slice"].ID()
+	sig := abi.Methods["slice"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{1}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{2}, 32)...)

@@ -648,7 +76,7 @@ func TestMethodPack(t *testing.T) {
 	}

 	var addrA, addrB = common.Address{1}, common.Address{2}
-	sig = abi.Methods["sliceAddress"].ID()
+	sig = abi.Methods["sliceAddress"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{32}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{2}, 32)...)
 	sig = append(sig, common.LeftPadBytes(addrA[:], 32)...)
@@ -663,7 +91,7 @@ func TestMethodPack(t *testing.T) {
 	}

 	var addrC, addrD = common.Address{3}, common.Address{4}
-	sig = abi.Methods["sliceMultiAddress"].ID()
+	sig = abi.Methods["sliceMultiAddress"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{64}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{160}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{2}, 32)...)
@@ -681,7 +109,7 @@ func TestMethodPack(t *testing.T) {
 		t.Errorf("expected %x got %x", sig, packed)
 	}

-	sig = abi.Methods["slice256"].ID()
+	sig = abi.Methods["slice256"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{1}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{2}, 32)...)

@@ -695,7 +123,7 @@ func TestMethodPack(t *testing.T) {
 	}

 	a := [2][2]*big.Int{{big.NewInt(1), big.NewInt(1)}, {big.NewInt(2), big.NewInt(0)}}
-	sig = abi.Methods["nestedArray"].ID()
+	sig = abi.Methods["nestedArray"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{1}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{1}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{2}, 32)...)
@@ -712,7 +140,7 @@ func TestMethodPack(t *testing.T) {
 		t.Errorf("expected %x got %x", sig, packed)
 	}

-	sig = abi.Methods["nestedArray2"].ID()
+	sig = abi.Methods["nestedArray2"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{0x20}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{0x40}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{0x80}, 32)...)
@@ -728,7 +156,7 @@ func TestMethodPack(t *testing.T) {
 		t.Errorf("expected %x got %x", sig, packed)
 	}

-	sig = abi.Methods["nestedSlice"].ID()
+	sig = abi.Methods["nestedSlice"].ID
 	sig = append(sig, common.LeftPadBytes([]byte{0x20}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{0x02}, 32)...)
 	sig = append(sig, common.LeftPadBytes([]byte{0x40}, 32)...)
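The sig values built above start from abi.Methods[...].ID, the 4-byte method selector. As a side note, not part of this diff: the selector is the first four bytes of the Keccak-256 hash of the method's canonical signature. The sketch below computes it for a slice(uint32[2]) method, which is what the "slice" entry in the test ABI is assumed to declare.

    package main

    import (
        "fmt"

        "golang.org/x/crypto/sha3"
    )

    // selector returns the 4-byte ABI method ID: the first four bytes of the
    // Keccak-256 hash of the canonical method signature.
    func selector(signature string) []byte {
        h := sha3.NewLegacyKeccak256()
        h.Write([]byte(signature))
        return h.Sum(nil)[:4]
    }

    func main() {
        // Assumed signature for the test ABI's "slice" method.
        fmt.Printf("%x\n", selector("slice(uint32[2])"))
    }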
accounts/abi/packing_test.go (new file, 990 additions)
@@ -0,0 +1,990 @@
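The new file that follows is a long list of hex fixtures. For orientation, and not part of the diff itself: each packed string follows the ABI head/tail layout, where a static value is a single left-padded 32-byte word, while a dynamic argument such as uint8[]{1, 2} is encoded as a 32-byte offset to its data, then the length, then one word per element. A minimal sketch reproducing the uint8[] fixture with common.LeftPadBytes:

    package main

    import (
        "encoding/hex"
        "fmt"

        "github.com/ethereum/go-ethereum/common"
    )

    func main() {
        var enc []byte
        enc = append(enc, common.LeftPadBytes([]byte{0x20}, 32)...) // head: offset 32 to the dynamic data
        enc = append(enc, common.LeftPadBytes([]byte{2}, 32)...)    // tail: len(slice) = 2
        enc = append(enc, common.LeftPadBytes([]byte{1}, 32)...)    // slice[0]
        enc = append(enc, common.LeftPadBytes([]byte{2}, 32)...)    // slice[1]
        // Print one 32-byte word per line; the output matches the packed value
        // of the `[{ "type": "uint8[]" }]` entry in the table below.
        for i := 0; i < len(enc); i += 32 {
            fmt.Println(hex.EncodeToString(enc[i : i+32]))
        }
    }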
// Copyright 2020 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.

package abi

import (
	"math/big"

	"github.com/ethereum/go-ethereum/common"
)

type packUnpackTest struct {
	def      string
	unpacked interface{}
	packed   string
}

var packUnpackTests = []packUnpackTest{
	// Booleans
	{
		def:      `[{ "type": "bool" }]`,
		packed:   "0000000000000000000000000000000000000000000000000000000000000001",
		unpacked: true,
	},
	{
		def:      `[{ "type": "bool" }]`,
		packed:   "0000000000000000000000000000000000000000000000000000000000000000",
		unpacked: false,
	},
	// Integers
	{
		def:      `[{ "type": "uint8" }]`,
		unpacked: uint8(2),
		packed:   "0000000000000000000000000000000000000000000000000000000000000002",
	},
	{
		def:      `[{ "type": "uint8[]" }]`,
		unpacked: []uint8{1, 2},
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002",
	},
	{
		def:      `[{"type": "uint17"}]`,
		packed:   "0000000000000000000000000000000000000000000000000000000000000001",
		unpacked: big.NewInt(1),
	},
	// … the analogous uint16/uint32/uint64/uint256 and int8 … int256 cases (scalar
	// and slice) continue here, each packed as 32-byte big-endian words …
	{
		def:      `[{"type": "int256"}]`,
		packed:   "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
		unpacked: big.NewInt(-1),
	},
	// Address
	{
		def:      `[{"type": "address"}]`,
		packed:   "0000000000000000000000000100000000000000000000000000000000000000",
		unpacked: common.Address{1},
	},
	{
		def:      `[{"type": "address[]"}]`,
		unpacked: []common.Address{{1}, {2}},
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000100000000000000000000000000000000000000" +
			"0000000000000000000000000200000000000000000000000000000000000000",
	},
	// Bytes
	{
		def:      `[{"type": "bytes1"}]`,
		unpacked: [1]byte{1},
		packed:   "0100000000000000000000000000000000000000000000000000000000000000",
	},
	// … bytes2 through bytes32 follow, each packing [N]byte{1} as "01" right-padded
	// with zero bytes …
	{
		def:      `[{"type": "bytes32"}]`,
		packed:   "0100000000000000000000000000000000000000000000000000000000000000",
		unpacked: [32]byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	},
	{
		def: `[{"type": "bytes"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000020" +
			"0100000000000000000000000000000000000000000000000000000000000000",
		unpacked: common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
	},
	// Functions
	{
		def:      `[{"type": "function"}]`,
		packed:   "0100000000000000000000000000000000000000000000000000000000000000",
		unpacked: [24]byte{1},
	},
	// Slice and Array
	{
		def: `[{"type": "uint8[]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000000",
		unpacked: []uint8{},
	},
	{
		def: `[{"type": "uint8[2]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002",
		unpacked: [2]uint8{1, 2},
	},
	{
		def: `[{"type": "int256[3]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000003",
		unpacked: [3]*big.Int{big.NewInt(1), big.NewInt(2), big.NewInt(3)},
	},
	// … the remaining empty-slice and fixed-size int/uint array cases continue here …
	// multi dimensional, if these pass, all types that don't require length prefix should pass
	{
		def: `[{"type": "uint8[][]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000000",
		unpacked: [][]uint8{},
	},
	{
		def: `[{"type": "uint8[][]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000040" +
			"00000000000000000000000000000000000000000000000000000000000000a0" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000003" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000003",
		unpacked: [][]uint8{{1, 2}, {1, 2, 3}},
	},
	{
		def: `[{"type": "uint8[2][2]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002",
		unpacked: [2][2]uint8{{1, 2}, {1, 2}},
	},
	{
		def: `[{"type": "uint8[][2]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000040" +
			"0000000000000000000000000000000000000000000000000000000000000080" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000001",
		unpacked: [2][]uint8{{1}, {1}},
	},
	{
		def: `[{"type": "uint8[2][]"}]`,
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002",
		unpacked: [][2]uint8{{1, 2}, {1, 2}},
	},
	// … the remaining uint16/uint32 slice and array cases and the uint32[2][3][4]
	// case (values 1 … 24 as consecutive 32-byte words) continue here …
	{
		def:      `[{"type": "bytes32[]"}]`,
		unpacked: [][32]byte{{1}, {2}},
		packed: "0000000000000000000000000000000000000000000000000000000000000020" +
			"0000000000000000000000000000000000000000000000000000000000000002" +
			"0100000000000000000000000000000000000000000000000000000000000000" +
			"0200000000000000000000000000000000000000000000000000000000000000",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "uint32[2]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
unpacked: [2]uint32{1, 2},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "uint64[]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
unpacked: []uint64{1, 2},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "uint64[2]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
unpacked: [2]uint64{1, 2},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "uint256[]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
unpacked: []*big.Int{big.NewInt(1), big.NewInt(2)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "uint256[3]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003",
|
||||||
|
unpacked: [3]*big.Int{big.NewInt(1), big.NewInt(2), big.NewInt(3)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "string[4]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000080" +
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000c0" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000100" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000140" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000005" +
|
||||||
|
"48656c6c6f000000000000000000000000000000000000000000000000000000" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000005" +
|
||||||
|
"576f726c64000000000000000000000000000000000000000000000000000000" +
|
||||||
|
"000000000000000000000000000000000000000000000000000000000000000b" +
|
||||||
|
"476f2d657468657265756d000000000000000000000000000000000000000000" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000008" +
|
||||||
|
"457468657265756d000000000000000000000000000000000000000000000000",
|
||||||
|
unpacked: [4]string{"Hello", "World", "Go-ethereum", "Ethereum"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "string[]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000080" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000008" +
|
||||||
|
"457468657265756d000000000000000000000000000000000000000000000000" +
|
||||||
|
"000000000000000000000000000000000000000000000000000000000000000b" +
|
||||||
|
"676f2d657468657265756d000000000000000000000000000000000000000000",
|
||||||
|
unpacked: []string{"Ethereum", "go-ethereum"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "bytes[]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000080" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" +
|
||||||
|
"f0f0f00000000000000000000000000000000000000000000000000000000000" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" +
|
||||||
|
"f0f0f00000000000000000000000000000000000000000000000000000000000",
|
||||||
|
unpacked: [][]byte{{0xf0, 0xf0, 0xf0}, {0xf0, 0xf0, 0xf0}},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "uint256[2][][]"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" +
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000e0" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000c8" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000003e8" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000c8" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000003e8",
|
||||||
|
unpacked: [][][2]*big.Int{{{big.NewInt(1), big.NewInt(200)}, {big.NewInt(1), big.NewInt(1000)}}, {{big.NewInt(1), big.NewInt(200)}, {big.NewInt(1), big.NewInt(1000)}}},
|
||||||
|
},
|
||||||
|
// struct outputs
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name":"int1","type":"int256"},{"name":"int2","type":"int256"}], "type":"tuple"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
unpacked: struct {
|
||||||
|
Int1 *big.Int
|
||||||
|
Int2 *big.Int
|
||||||
|
}{big.NewInt(1), big.NewInt(2)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name":"int_one","type":"int256"}], "type":"tuple"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001",
|
||||||
|
unpacked: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
}{big.NewInt(1)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name":"int__one","type":"int256"}], "type":"tuple"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001",
|
||||||
|
unpacked: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
}{big.NewInt(1)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name":"int_one_","type":"int256"}], "type":"tuple"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001",
|
||||||
|
unpacked: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
}{big.NewInt(1)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name":"int_one","type":"int256"}, {"name":"intone","type":"int256"}], "type":"tuple"}]`,
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
unpacked: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
Intone *big.Int
|
||||||
|
}{big.NewInt(1), big.NewInt(2)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "string"}]`,
|
||||||
|
unpacked: "foobar",
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000006" +
|
||||||
|
"666f6f6261720000000000000000000000000000000000000000000000000000",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "string[]"}]`,
|
||||||
|
unpacked: []string{"hello", "foobar"},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // len(array) = 2
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to i = 0
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000080" + // offset 128 to i = 1
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000005" + // len(str[0]) = 5
|
||||||
|
"68656c6c6f000000000000000000000000000000000000000000000000000000" + // str[0]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000006" + // len(str[1]) = 6
|
||||||
|
"666f6f6261720000000000000000000000000000000000000000000000000000", // str[1]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "string[2]"}]`,
|
||||||
|
unpacked: [2]string{"hello", "foobar"},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // offset to i = 0
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000080" + // offset to i = 1
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000005" + // len(str[0]) = 5
|
||||||
|
"68656c6c6f000000000000000000000000000000000000000000000000000000" + // str[0]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000006" + // len(str[1]) = 6
|
||||||
|
"666f6f6261720000000000000000000000000000000000000000000000000000", // str[1]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "bytes32[][]"}]`,
|
||||||
|
unpacked: [][][32]byte{{{1}, {2}}, {{3}, {4}, {5}}},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // len(array) = 2
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to i = 0
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000a0" + // offset 160 to i = 1
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // len(array[0]) = 2
|
||||||
|
"0100000000000000000000000000000000000000000000000000000000000000" + // array[0][0]
|
||||||
|
"0200000000000000000000000000000000000000000000000000000000000000" + // array[0][1]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" + // len(array[1]) = 3
|
||||||
|
"0300000000000000000000000000000000000000000000000000000000000000" + // array[1][0]
|
||||||
|
"0400000000000000000000000000000000000000000000000000000000000000" + // array[1][1]
|
||||||
|
"0500000000000000000000000000000000000000000000000000000000000000", // array[1][2]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "bytes32[][2]"}]`,
|
||||||
|
unpacked: [2][][32]byte{{{1}, {2}}, {{3}, {4}, {5}}},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to i = 0
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000a0" + // offset 160 to i = 1
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // len(array[0]) = 2
|
||||||
|
"0100000000000000000000000000000000000000000000000000000000000000" + // array[0][0]
|
||||||
|
"0200000000000000000000000000000000000000000000000000000000000000" + // array[0][1]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" + // len(array[1]) = 3
|
||||||
|
"0300000000000000000000000000000000000000000000000000000000000000" + // array[1][0]
|
||||||
|
"0400000000000000000000000000000000000000000000000000000000000000" + // array[1][1]
|
||||||
|
"0500000000000000000000000000000000000000000000000000000000000000", // array[1][2]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"type": "bytes32[3][2]"}]`,
|
||||||
|
unpacked: [2][3][32]byte{{{1}, {2}, {3}}, {{3}, {4}, {5}}},
|
||||||
|
packed: "0100000000000000000000000000000000000000000000000000000000000000" + // array[0][0]
|
||||||
|
"0200000000000000000000000000000000000000000000000000000000000000" + // array[0][1]
|
||||||
|
"0300000000000000000000000000000000000000000000000000000000000000" + // array[0][2]
|
||||||
|
"0300000000000000000000000000000000000000000000000000000000000000" + // array[1][0]
|
||||||
|
"0400000000000000000000000000000000000000000000000000000000000000" + // array[1][1]
|
||||||
|
"0500000000000000000000000000000000000000000000000000000000000000", // array[1][2]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
// static tuple
|
||||||
|
def: `[{"components": [{"name":"a","type":"int64"},
|
||||||
|
{"name":"b","type":"int256"},
|
||||||
|
{"name":"c","type":"int256"},
|
||||||
|
{"name":"d","type":"bool"},
|
||||||
|
{"name":"e","type":"bytes32[3][2]"}], "type":"tuple"}]`,
|
||||||
|
unpacked: struct {
|
||||||
|
A int64
|
||||||
|
B *big.Int
|
||||||
|
C *big.Int
|
||||||
|
D bool
|
||||||
|
E [2][3][32]byte
|
||||||
|
}{1, big.NewInt(1), big.NewInt(-1), true, [2][3][32]byte{{{1}, {2}, {3}}, {{3}, {4}, {5}}}},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000001" + // struct[a]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[b]
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // struct[c]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[d]
|
||||||
|
"0100000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[0][0]
|
||||||
|
"0200000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[0][1]
|
||||||
|
"0300000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[0][2]
|
||||||
|
"0300000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[1][0]
|
||||||
|
"0400000000000000000000000000000000000000000000000000000000000000" + // struct[e] array[1][1]
|
||||||
|
"0500000000000000000000000000000000000000000000000000000000000000", // struct[e] array[1][2]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name":"a","type":"string"},
|
||||||
|
{"name":"b","type":"int64"},
|
||||||
|
{"name":"c","type":"bytes"},
|
||||||
|
{"name":"d","type":"string[]"},
|
||||||
|
{"name":"e","type":"int256[]"},
|
||||||
|
{"name":"f","type":"address[]"}], "type":"tuple"}]`,
|
||||||
|
unpacked: struct {
|
||||||
|
A string
|
||||||
|
B int64
|
||||||
|
C []byte
|
||||||
|
D []string
|
||||||
|
E []*big.Int
|
||||||
|
F []common.Address
|
||||||
|
}{"foobar", 1, []byte{1}, []string{"foo", "bar"}, []*big.Int{big.NewInt(1), big.NewInt(-1)}, []common.Address{{1}, {2}}},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" + // struct a
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000c0" + // struct[a] offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[b]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000100" + // struct[c] offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000140" + // struct[d] offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000220" + // struct[e] offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000280" + // struct[f] offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000006" + // struct[a] length
|
||||||
|
"666f6f6261720000000000000000000000000000000000000000000000000000" + // struct[a] "foobar"
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // struct[c] length
|
||||||
|
"0100000000000000000000000000000000000000000000000000000000000000" + // []byte{1}
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // struct[d] length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // foo offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000080" + // bar offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" + // foo length
|
||||||
|
"666f6f0000000000000000000000000000000000000000000000000000000000" + // foo
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" + // bar offset
|
||||||
|
"6261720000000000000000000000000000000000000000000000000000000000" + // bar
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // struct[e] length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // 1
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // -1
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // struct[f] length
|
||||||
|
"0000000000000000000000000100000000000000000000000000000000000000" + // common.Address{1}
|
||||||
|
"0000000000000000000000000200000000000000000000000000000000000000", // common.Address{2}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{ "type": "tuple","components": [{"name": "a","type": "uint256"},
|
||||||
|
{"name": "b","type": "uint256[]"}],
|
||||||
|
"name": "a","type": "tuple"},
|
||||||
|
{"name": "b","type": "uint256[]"}], "type": "tuple"}]`,
|
||||||
|
unpacked: struct {
|
||||||
|
A struct {
|
||||||
|
A *big.Int
|
||||||
|
B []*big.Int
|
||||||
|
}
|
||||||
|
B []*big.Int
|
||||||
|
}{
|
||||||
|
A: struct {
|
||||||
|
A *big.Int
|
||||||
|
B []*big.Int
|
||||||
|
}{big.NewInt(1), []*big.Int{big.NewInt(1), big.NewInt(2)}},
|
||||||
|
B: []*big.Int{big.NewInt(1), big.NewInt(2)}},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" + // struct a
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // a offset
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000e0" + // b offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // a.a value
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // a.b offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // a.b length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // a.b[0] value
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // a.b[1] value
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // b length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // b[0] value
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002", // b[1] value
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name": "a","type": "int256"},
|
||||||
|
{"name": "b","type": "int256[]"}],
|
||||||
|
"name": "a","type": "tuple[]"}]`,
|
||||||
|
unpacked: []struct {
|
||||||
|
A *big.Int
|
||||||
|
B []*big.Int
|
||||||
|
}{
|
||||||
|
{big.NewInt(-1), []*big.Int{big.NewInt(1), big.NewInt(3)}},
|
||||||
|
{big.NewInt(1), []*big.Int{big.NewInt(2), big.NewInt(-1)}},
|
||||||
|
},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[0] offset
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000e0" + // tuple[1] offset
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // tuple[0].A
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[0].B offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[0].B length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[0].B[0] value
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000003" + // tuple[0].B[1] value
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[1].A
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[1].B offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[1].B length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[1].B[0] value
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", // tuple[1].B[1] value
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name": "a","type": "int256"},
|
||||||
|
{"name": "b","type": "int256"}],
|
||||||
|
"name": "a","type": "tuple[2]"}]`,
|
||||||
|
unpacked: [2]struct {
|
||||||
|
A *big.Int
|
||||||
|
B *big.Int
|
||||||
|
}{
|
||||||
|
{big.NewInt(-1), big.NewInt(1)},
|
||||||
|
{big.NewInt(1), big.NewInt(-1)},
|
||||||
|
},
|
||||||
|
packed: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // tuple[0].a
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[0].b
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[1].a
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", // tuple[1].b
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"components": [{"name": "a","type": "int256[]"}],
|
||||||
|
"name": "a","type": "tuple[2]"}]`,
|
||||||
|
unpacked: [2]struct {
|
||||||
|
A []*big.Int
|
||||||
|
}{
|
||||||
|
{[]*big.Int{big.NewInt(-1), big.NewInt(1)}},
|
||||||
|
{[]*big.Int{big.NewInt(1), big.NewInt(-1)}},
|
||||||
|
},
|
||||||
|
packed: "0000000000000000000000000000000000000000000000000000000000000020" +
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000040" + // tuple[0] offset
|
||||||
|
"00000000000000000000000000000000000000000000000000000000000000c0" + // tuple[1] offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000020" + // tuple[0].A offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[0].A length
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + // tuple[0].A[0]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[0].A[1]
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000020" + // tuple[1].A offset
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000002" + // tuple[1].A length
|
||||||
|
"0000000000000000000000000000000000000000000000000000000000000001" + // tuple[1].A[0]
|
||||||
|
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", // tuple[1].A[1]
|
||||||
|
},
|
||||||
|
}
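
Every packed string in the table above follows the ABI head/tail rule: static values occupy 32-byte words in place, while dynamic values (strings, bytes, slices) contribute a 32-byte offset word in the head and append their length and data in the tail. A minimal standalone sketch, using only the standard library and the string[]{"hello", "foobar"} vector from the table, that splits the encoding back into words so the offsets become visible (the helper layout and comments are mine):

package main

import (
	"encoding/hex"
	"fmt"
)

func main() {
	// packed string[]{"hello", "foobar"} from the test table above,
	// without the leading 0x20 word that points at the array itself.
	packed := "0000000000000000000000000000000000000000000000000000000000000002" + // len(array) = 2
		"0000000000000000000000000000000000000000000000000000000000000040" + // offset 64 to element 0
		"0000000000000000000000000000000000000000000000000000000000000080" + // offset 128 to element 1
		"0000000000000000000000000000000000000000000000000000000000000005" + // len(str[0]) = 5
		"68656c6c6f000000000000000000000000000000000000000000000000000000" + // "hello", right padded
		"0000000000000000000000000000000000000000000000000000000000000006" + // len(str[1]) = 6
		"666f6f6261720000000000000000000000000000000000000000000000000000" // "foobar", right padded

	data, _ := hex.DecodeString(packed)
	for i := 0; i < len(data); i += 32 {
		fmt.Printf("word %2d: %x\n", i/32, data[i:i+32])
	}
	// Element offsets are measured from the word right after the array length:
	// two 32-byte head words come first, so element 0's data starts at 64 (0x40)
	// and element 1's at 64 + 32 (length word) + 32 (data word) = 128 (0x80).
}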
|
@@ -17,57 +17,74 @@
 package abi
 
 import (
+	"errors"
 	"fmt"
+	"math/big"
 	"reflect"
 	"strings"
 )
 
+// ConvertType converts an interface of a runtime type into a interface of the
+// given type
+// e.g. turn
+// var fields []reflect.StructField
+// fields = append(fields, reflect.StructField{
+//	Name: "X",
+//	Type: reflect.TypeOf(new(big.Int)),
+//	Tag:  reflect.StructTag("json:\"" + "x" + "\""),
+// }
+// into
+// type TupleT struct { X *big.Int }
+func ConvertType(in interface{}, proto interface{}) interface{} {
+	protoType := reflect.TypeOf(proto)
+	if reflect.TypeOf(in).ConvertibleTo(protoType) {
+		return reflect.ValueOf(in).Convert(protoType).Interface()
+	}
+	// Use set as a last ditch effort
+	if err := set(reflect.ValueOf(proto), reflect.ValueOf(in)); err != nil {
+		panic(err)
+	}
+	return proto
+}
+
 // indirect recursively dereferences the value until it either gets the value
 // or finds a big.Int
 func indirect(v reflect.Value) reflect.Value {
-	if v.Kind() == reflect.Ptr && v.Elem().Type() != derefbigT {
+	if v.Kind() == reflect.Ptr && v.Elem().Type() != reflect.TypeOf(big.Int{}) {
 		return indirect(v.Elem())
 	}
 	return v
 }
 
-// indirectInterfaceOrPtr recursively dereferences the value until value is not interface.
-func indirectInterfaceOrPtr(v reflect.Value) reflect.Value {
-	if (v.Kind() == reflect.Interface || v.Kind() == reflect.Ptr) && v.Elem().IsValid() {
-		return indirect(v.Elem())
-	}
-	return v
-}
-
-// reflectIntKind returns the reflect using the given size and
+// reflectIntType returns the reflect using the given size and
 // unsignedness.
-func reflectIntKindAndType(unsigned bool, size int) (reflect.Kind, reflect.Type) {
+func reflectIntType(unsigned bool, size int) reflect.Type {
+	if unsigned {
+		switch size {
+		case 8:
+			return reflect.TypeOf(uint8(0))
+		case 16:
+			return reflect.TypeOf(uint16(0))
+		case 32:
+			return reflect.TypeOf(uint32(0))
+		case 64:
+			return reflect.TypeOf(uint64(0))
+		}
+	}
 	switch size {
 	case 8:
-		if unsigned {
-			return reflect.Uint8, uint8T
-		}
-		return reflect.Int8, int8T
+		return reflect.TypeOf(int8(0))
 	case 16:
-		if unsigned {
-			return reflect.Uint16, uint16T
-		}
-		return reflect.Int16, int16T
+		return reflect.TypeOf(int16(0))
 	case 32:
-		if unsigned {
-			return reflect.Uint32, uint32T
-		}
-		return reflect.Int32, int32T
+		return reflect.TypeOf(int32(0))
 	case 64:
-		if unsigned {
-			return reflect.Uint64, uint64T
-		}
-		return reflect.Int64, int64T
+		return reflect.TypeOf(int64(0))
 	}
-	return reflect.Ptr, bigT
+	return reflect.TypeOf(&big.Int{})
 }
 
-// mustArrayToBytesSlice creates a new byte slice with the exact same size as value
+// mustArrayToByteSlice creates a new byte slice with the exact same size as value
 // and copies the bytes in value to the new slice.
 func mustArrayToByteSlice(value reflect.Value) reflect.Value {
 	slice := reflect.MakeSlice(reflect.TypeOf([]byte{}), value.Len(), value.Len())
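
ConvertType, introduced in the hunk above, bridges two Go types that describe the same ABI tuple. A minimal usage sketch, assuming it is called from another package through the exported abi.ConvertType; the two struct types here are invented purely for illustration:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

// Two structurally identical tuple representations; only the type name differs.
type rawPoint struct {
	X *big.Int
	Y *big.Int
}
type Point struct {
	X *big.Int
	Y *big.Int
}

func main() {
	in := rawPoint{X: big.NewInt(1), Y: big.NewInt(2)}
	// *rawPoint is convertible to *Point, so ConvertType takes the plain
	// reflect.Convert path; shapes that are not convertible fall back to set().
	out := abi.ConvertType(&in, new(Point)).(*Point)
	fmt.Println(out.X, out.Y) // 1 2
}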
@@ -84,12 +101,16 @@ func set(dst, src reflect.Value) error {
 	switch {
 	case dstType.Kind() == reflect.Interface && dst.Elem().IsValid():
 		return set(dst.Elem(), src)
-	case dstType.Kind() == reflect.Ptr && dstType.Elem() != derefbigT:
+	case dstType.Kind() == reflect.Ptr && dstType.Elem() != reflect.TypeOf(big.Int{}):
 		return set(dst.Elem(), src)
 	case srcType.AssignableTo(dstType) && dst.CanSet():
 		dst.Set(src)
-	case dstType.Kind() == reflect.Slice && srcType.Kind() == reflect.Slice:
+	case dstType.Kind() == reflect.Slice && srcType.Kind() == reflect.Slice && dst.CanSet():
 		return setSlice(dst, src)
+	case dstType.Kind() == reflect.Array:
+		return setArray(dst, src)
+	case dstType.Kind() == reflect.Struct:
+		return setStruct(dst, src)
 	default:
 		return fmt.Errorf("abi: cannot unmarshal %v in to %v", src.Type(), dst.Type())
 	}
@@ -98,38 +119,52 @@ func set(dst, src reflect.Value) error {
 
 // setSlice attempts to assign src to dst when slices are not assignable by default
 // e.g. src: [][]byte -> dst: [][15]byte
+// setSlice ignores if we cannot copy all of src' elements.
 func setSlice(dst, src reflect.Value) error {
 	slice := reflect.MakeSlice(dst.Type(), src.Len(), src.Len())
 	for i := 0; i < src.Len(); i++ {
-		v := src.Index(i)
-		reflect.Copy(slice.Index(i), v)
-	}
-	dst.Set(slice)
-	return nil
-}
-
-// requireAssignable assures that `dest` is a pointer and it's not an interface.
-func requireAssignable(dst, src reflect.Value) error {
-	if dst.Kind() != reflect.Ptr && dst.Kind() != reflect.Interface {
-		return fmt.Errorf("abi: cannot unmarshal %v into %v", src.Type(), dst.Type())
-	}
-	return nil
-}
-
-// requireUnpackKind verifies preconditions for unpacking `args` into `kind`
-func requireUnpackKind(v reflect.Value, t reflect.Type, k reflect.Kind,
-	args Arguments) error {
-
-	switch k {
-	case reflect.Struct:
-	case reflect.Slice, reflect.Array:
-		if minLen := args.LengthNonIndexed(); v.Len() < minLen {
-			return fmt.Errorf("abi: insufficient number of elements in the list/array for unpack, want %d, got %d",
-				minLen, v.Len())
-		}
-	default:
-		return fmt.Errorf("abi: cannot unmarshal tuple into %v", t)
-	}
-	return nil
-}
+		if err := set(slice.Index(i), src.Index(i)); err != nil {
+			return err
+		}
+	}
+	if dst.CanSet() {
+		dst.Set(slice)
+		return nil
+	}
+	return errors.New("Cannot set slice, destination not settable")
+}
+
+func setArray(dst, src reflect.Value) error {
+	if src.Kind() == reflect.Ptr {
+		return set(dst, indirect(src))
+	}
+	array := reflect.New(dst.Type()).Elem()
+	min := src.Len()
+	if src.Len() > dst.Len() {
+		min = dst.Len()
+	}
+	for i := 0; i < min; i++ {
+		if err := set(array.Index(i), src.Index(i)); err != nil {
+			return err
+		}
+	}
+	if dst.CanSet() {
+		dst.Set(array)
+		return nil
+	}
+	return errors.New("Cannot set array, destination not settable")
+}
+
+func setStruct(dst, src reflect.Value) error {
+	for i := 0; i < src.NumField(); i++ {
+		srcField := src.Field(i)
+		dstField := dst.Field(i)
+		if !dstField.IsValid() || !srcField.IsValid() {
+			return fmt.Errorf("Could not find src field: %v value: %v in destination", srcField.Type().Name(), srcField)
+		}
+		if err := set(dstField, srcField); err != nil {
+			return err
+		}
+	}
+	return nil
+}
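
setSlice and setArray above exist because reflect will not assign across element types in one step: a decoded [][]byte cannot simply be stored into a [][15]byte destination, each element has to be copied into a freshly built value of the target type. A small standard-library-only sketch of that situation, mirroring the [][]byte -> [][15]byte example in the comment (no abi code involved):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	src := [][]byte{[]byte("hello"), []byte("world")}
	var dst [][15]byte

	// Direct assignment is rejected: []byte is not assignable to [15]byte.
	fmt.Println(reflect.TypeOf(src).AssignableTo(reflect.TypeOf(dst))) // false

	// So a helper like setSlice builds a new slice of the destination type
	// and fills it element by element.
	out := reflect.MakeSlice(reflect.TypeOf(dst), len(src), len(src))
	for i, b := range src {
		reflect.Copy(out.Index(i), reflect.ValueOf(b)) // copy bytes into the fixed-size array
	}
	dst = out.Interface().([][15]byte)
	fmt.Printf("%q\n", dst[0][:5]) // "hello"
}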
@@ -156,9 +191,8 @@ func mapArgNamesToStructFields(argNames []string, value reflect.Value) (map[stri
 			continue
 		}
 		// skip fields that have no abi:"" tag.
-		var ok bool
-		var tagName string
-		if tagName, ok = typ.Field(i).Tag.Lookup("abi"); !ok {
+		tagName, ok := typ.Field(i).Tag.Lookup("abi")
+		if !ok {
 			continue
 		}
 		// check if tag is empty.
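
The tag lookup above is what powers the abi:"" struct tag: a field can be bound to an ABI argument whose name does not CamelCase cleanly onto the field name. A tiny illustrative sketch; the argument names here are made up:

package main

import "math/big"

// mapArgNamesToStructFields matches ABI argument names to struct fields either
// by a CamelCased version of the name or, as the tag lookup above shows, by an
// explicit abi:"" tag on the field.
type SwapResult struct {
	AmountIn  *big.Int `abi:"amount_in"`
	AmountOut *big.Int `abi:"amount_out"`
}

func main() {
	_ = SwapResult{AmountIn: big.NewInt(1), AmountOut: big.NewInt(2)}
}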
@@ -17,6 +17,7 @@
 package abi
 
 import (
+	"math/big"
 	"reflect"
 	"testing"
 )
@@ -189,3 +190,72 @@ func TestReflectNameToStruct(t *testing.T) {
 		})
 	}
 }
+
+func TestConvertType(t *testing.T) {
+	// Test Basic Struct
+	type T struct {
+		X *big.Int
+		Y *big.Int
+	}
+	// Create on-the-fly structure
+	var fields []reflect.StructField
+	fields = append(fields, reflect.StructField{
+		Name: "X",
+		Type: reflect.TypeOf(new(big.Int)),
+		Tag:  "json:\"" + "x" + "\"",
+	})
+	fields = append(fields, reflect.StructField{
+		Name: "Y",
+		Type: reflect.TypeOf(new(big.Int)),
+		Tag:  "json:\"" + "y" + "\"",
+	})
+	val := reflect.New(reflect.StructOf(fields))
+	val.Elem().Field(0).Set(reflect.ValueOf(big.NewInt(1)))
+	val.Elem().Field(1).Set(reflect.ValueOf(big.NewInt(2)))
+	// ConvertType
+	out := *ConvertType(val.Interface(), new(T)).(*T)
+	if out.X.Cmp(big.NewInt(1)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out.X, big.NewInt(1))
+	}
+	if out.Y.Cmp(big.NewInt(2)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out.Y, big.NewInt(2))
+	}
+	// Slice Type
+	val2 := reflect.MakeSlice(reflect.SliceOf(reflect.StructOf(fields)), 2, 2)
+	val2.Index(0).Field(0).Set(reflect.ValueOf(big.NewInt(1)))
+	val2.Index(0).Field(1).Set(reflect.ValueOf(big.NewInt(2)))
+	val2.Index(1).Field(0).Set(reflect.ValueOf(big.NewInt(3)))
+	val2.Index(1).Field(1).Set(reflect.ValueOf(big.NewInt(4)))
+	out2 := *ConvertType(val2.Interface(), new([]T)).(*[]T)
+	if out2[0].X.Cmp(big.NewInt(1)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out2[0].X, big.NewInt(1))
+	}
+	if out2[0].Y.Cmp(big.NewInt(2)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out2[1].Y, big.NewInt(2))
+	}
+	if out2[1].X.Cmp(big.NewInt(3)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out2[0].X, big.NewInt(1))
+	}
+	if out2[1].Y.Cmp(big.NewInt(4)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out2[1].Y, big.NewInt(2))
+	}
+	// Array Type
+	val3 := reflect.New(reflect.ArrayOf(2, reflect.StructOf(fields)))
+	val3.Elem().Index(0).Field(0).Set(reflect.ValueOf(big.NewInt(1)))
+	val3.Elem().Index(0).Field(1).Set(reflect.ValueOf(big.NewInt(2)))
+	val3.Elem().Index(1).Field(0).Set(reflect.ValueOf(big.NewInt(3)))
+	val3.Elem().Index(1).Field(1).Set(reflect.ValueOf(big.NewInt(4)))
+	out3 := *ConvertType(val3.Interface(), new([2]T)).(*[2]T)
+	if out3[0].X.Cmp(big.NewInt(1)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out3[0].X, big.NewInt(1))
+	}
+	if out3[0].Y.Cmp(big.NewInt(2)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out3[1].Y, big.NewInt(2))
+	}
+	if out3[1].X.Cmp(big.NewInt(3)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out3[0].X, big.NewInt(1))
+	}
+	if out3[1].Y.Cmp(big.NewInt(4)) != 0 {
+		t.Errorf("ConvertType failed, got %v want %v", out3[1].Y, big.NewInt(2))
+	}
+}
accounts/abi/topics.go (new file, 173 lines)
@@ -0,0 +1,173 @@
// Copyright 2018 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.

package abi

import (
	"encoding/binary"
	"errors"
	"fmt"
	"math/big"
	"reflect"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
)

// MakeTopics converts a filter query argument list into a filter topic set.
func MakeTopics(query ...[]interface{}) ([][]common.Hash, error) {
	topics := make([][]common.Hash, len(query))
	for i, filter := range query {
		for _, rule := range filter {
			var topic common.Hash

			// Try to generate the topic based on simple types
			switch rule := rule.(type) {
			case common.Hash:
				copy(topic[:], rule[:])
			case common.Address:
				copy(topic[common.HashLength-common.AddressLength:], rule[:])
			case *big.Int:
				blob := rule.Bytes()
				copy(topic[common.HashLength-len(blob):], blob)
			case bool:
				if rule {
					topic[common.HashLength-1] = 1
				}
			case int8:
				copy(topic[:], genIntType(int64(rule), 1))
			case int16:
				copy(topic[:], genIntType(int64(rule), 2))
			case int32:
				copy(topic[:], genIntType(int64(rule), 4))
			case int64:
				copy(topic[:], genIntType(rule, 8))
			case uint8:
				blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
				copy(topic[common.HashLength-len(blob):], blob)
			case uint16:
				blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
				copy(topic[common.HashLength-len(blob):], blob)
			case uint32:
				blob := new(big.Int).SetUint64(uint64(rule)).Bytes()
				copy(topic[common.HashLength-len(blob):], blob)
			case uint64:
				blob := new(big.Int).SetUint64(rule).Bytes()
				copy(topic[common.HashLength-len(blob):], blob)
			case string:
				hash := crypto.Keccak256Hash([]byte(rule))
				copy(topic[:], hash[:])
			case []byte:
				hash := crypto.Keccak256Hash(rule)
				copy(topic[:], hash[:])

			default:
				// todo(rjl493456442) according solidity documentation, indexed event
				// parameters that are not value types i.e. arrays and structs are not
				// stored directly but instead a keccak256-hash of an encoding is stored.
				//
				// We only convert stringS and bytes to hash, still need to deal with
				// array(both fixed-size and dynamic-size) and struct.

				// Attempt to generate the topic from funky types
				val := reflect.ValueOf(rule)
				switch {
				// static byte array
				case val.Kind() == reflect.Array && reflect.TypeOf(rule).Elem().Kind() == reflect.Uint8:
					reflect.Copy(reflect.ValueOf(topic[:val.Len()]), val)
				default:
					return nil, fmt.Errorf("unsupported indexed type: %T", rule)
				}
			}
			topics[i] = append(topics[i], topic)
		}
	}
	return topics, nil
}

func genIntType(rule int64, size uint) []byte {
	var topic [common.HashLength]byte
	if rule < 0 {
		// if a rule is negative, we need to put it into two's complement.
		// extended to common.HashLength bytes.
		topic = [common.HashLength]byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}
	}
	for i := uint(0); i < size; i++ {
		topic[common.HashLength-i-1] = byte(rule >> (i * 8))
	}
	return topic[:]
}

// ParseTopics converts the indexed topic fields into actual log field values.
func ParseTopics(out interface{}, fields Arguments, topics []common.Hash) error {
	return parseTopicWithSetter(fields, topics,
		func(arg Argument, reconstr interface{}) {
			field := reflect.ValueOf(out).Elem().FieldByName(ToCamelCase(arg.Name))
			field.Set(reflect.ValueOf(reconstr))
		})
}

// ParseTopicsIntoMap converts the indexed topic field-value pairs into map key-value pairs.
func ParseTopicsIntoMap(out map[string]interface{}, fields Arguments, topics []common.Hash) error {
	return parseTopicWithSetter(fields, topics,
		func(arg Argument, reconstr interface{}) {
			out[arg.Name] = reconstr
		})
}

// parseTopicWithSetter converts the indexed topic field-value pairs and stores them using the
// provided set function.
//
// Note, dynamic types cannot be reconstructed since they get mapped to Keccak256
// hashes as the topic value!
func parseTopicWithSetter(fields Arguments, topics []common.Hash, setter func(Argument, interface{})) error {
	// Sanity check that the fields and topics match up
	if len(fields) != len(topics) {
		return errors.New("topic/field count mismatch")
	}
	// Iterate over all the fields and reconstruct them from topics
	for i, arg := range fields {
		if !arg.Indexed {
			return errors.New("non-indexed field in topic reconstruction")
		}
		var reconstr interface{}
		switch arg.Type.T {
		case TupleTy:
			return errors.New("tuple type in topic reconstruction")
		case StringTy, BytesTy, SliceTy, ArrayTy:
			// Array types (including strings and bytes) have their keccak256 hashes stored in the topic- not a hash
			// whose bytes can be decoded to the actual value- so the best we can do is retrieve that hash
			reconstr = topics[i]
		case FunctionTy:
			if garbage := binary.BigEndian.Uint64(topics[i][0:8]); garbage != 0 {
				return fmt.Errorf("bind: got improperly encoded function type, got %v", topics[i].Bytes())
			}
			var tmp [24]byte
			copy(tmp[:], topics[i][8:32])
			reconstr = tmp
		default:
			var err error
			reconstr, err = toGoType(0, arg.Type, topics[i].Bytes())
			if err != nil {
				return err
			}
		}
		// Use the setter function to store the value
		setter(arg, reconstr)
	}

	return nil
}
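
MakeTopics above is the forward direction (Go filter values to topic hashes) and the ParseTopics helpers reverse it for indexed log fields. A short usage sketch from outside the package; the address and amount values are arbitrary:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

func main() {
	sender := common.HexToAddress("0x00000000000000000000000000000000deadbeef")

	// Position 0 matches either the sender or the zero address,
	// position 1 matches the exact amount 1000.
	topics, err := abi.MakeTopics(
		[]interface{}{sender, common.Address{}},
		[]interface{}{big.NewInt(1000)},
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(topics), len(topics[0])) // 2 positions, 2 alternatives in the first

	// Simple values are left-padded into a 32-byte word, so the amount topic ends in 03e8.
	fmt.Printf("%x\n", topics[1][0])
}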
accounts/abi/topics_test.go (new file, 381 lines)
@@ -0,0 +1,381 @@
// Copyright 2019 The go-ethereum Authors
|
||||||
|
// This file is part of the go-ethereum library.
|
||||||
|
//
|
||||||
|
// The go-ethereum library is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU Lesser General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// The go-ethereum library is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU Lesser General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU Lesser General Public License
|
||||||
|
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package abi
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math/big"
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/ethereum/go-ethereum/common"
|
||||||
|
"github.com/ethereum/go-ethereum/crypto"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestMakeTopics(t *testing.T) {
|
||||||
|
type args struct {
|
||||||
|
query [][]interface{}
|
||||||
|
}
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
args args
|
||||||
|
want [][]common.Hash
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
"support fixed byte types, right padded to 32 bytes",
|
||||||
|
args{[][]interface{}{{[5]byte{1, 2, 3, 4, 5}}}},
|
||||||
|
[][]common.Hash{{common.Hash{1, 2, 3, 4, 5}}},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support common hash types in topics",
|
||||||
|
args{[][]interface{}{{common.Hash{1, 2, 3, 4, 5}}}},
|
||||||
|
[][]common.Hash{{common.Hash{1, 2, 3, 4, 5}}},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support address types in topics",
|
||||||
|
args{[][]interface{}{{common.Address{1, 2, 3, 4, 5}}}},
|
||||||
|
[][]common.Hash{{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5}}},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support *big.Int types in topics",
|
||||||
|
args{[][]interface{}{{big.NewInt(1).Lsh(big.NewInt(2), 254)}}},
|
||||||
|
[][]common.Hash{{common.Hash{128}}},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support boolean types in topics",
|
||||||
|
args{[][]interface{}{
|
||||||
|
{true},
|
||||||
|
{false},
|
||||||
|
}},
|
||||||
|
[][]common.Hash{
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}},
|
||||||
|
{common.Hash{0}},
|
||||||
|
},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support int/uint(8/16/32/64) types in topics",
|
||||||
|
args{[][]interface{}{
|
||||||
|
{int8(-2)},
|
||||||
|
{int16(-3)},
|
||||||
|
{int32(-4)},
|
||||||
|
{int64(-5)},
|
||||||
|
{int8(1)},
|
||||||
|
{int16(256)},
|
||||||
|
{int32(65536)},
|
||||||
|
{int64(4294967296)},
|
||||||
|
{uint8(1)},
|
||||||
|
{uint16(256)},
|
||||||
|
{uint32(65536)},
|
||||||
|
{uint64(4294967296)},
|
||||||
|
}},
|
||||||
|
[][]common.Hash{
|
||||||
|
{common.Hash{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254}},
|
||||||
|
{common.Hash{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253}},
|
||||||
|
{common.Hash{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252}},
|
||||||
|
{common.Hash{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0}},
|
||||||
|
{common.Hash{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0}},
|
||||||
|
},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support string types in topics",
|
||||||
|
args{[][]interface{}{{"hello world"}}},
|
||||||
|
[][]common.Hash{{crypto.Keccak256Hash([]byte("hello world"))}},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"support byte slice types in topics",
|
||||||
|
args{[][]interface{}{{[]byte{1, 2, 3}}}},
|
||||||
|
[][]common.Hash{{crypto.Keccak256Hash([]byte{1, 2, 3})}},
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got, err := MakeTopics(tt.args.query...)
|
||||||
|
if (err != nil) != tt.wantErr {
|
||||||
|
t.Errorf("makeTopics() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("makeTopics() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type args struct {
|
||||||
|
createObj func() interface{}
|
||||||
|
	resultObj func() interface{}
	resultMap func() map[string]interface{}
	fields    Arguments
	topics    []common.Hash
}

type bytesStruct struct {
	StaticBytes [5]byte
}
type int8Struct struct {
	Int8Value int8
}
type int256Struct struct {
	Int256Value *big.Int
}

type hashStruct struct {
	HashValue common.Hash
}

type funcStruct struct {
	FuncValue [24]byte
}

type topicTest struct {
	name    string
	args    args
	wantErr bool
}

func setupTopicsTests() []topicTest {
	bytesType, _ := NewType("bytes5", "", nil)
	int8Type, _ := NewType("int8", "", nil)
	int256Type, _ := NewType("int256", "", nil)
	tupleType, _ := NewType("tuple(int256,int8)", "", nil)
	stringType, _ := NewType("string", "", nil)
	funcType, _ := NewType("function", "", nil)

	tests := []topicTest{
		{
			name: "support fixed byte types, right padded to 32 bytes",
			args: args{
				createObj: func() interface{} { return &bytesStruct{} },
				resultObj: func() interface{} { return &bytesStruct{StaticBytes: [5]byte{1, 2, 3, 4, 5}} },
				resultMap: func() map[string]interface{} {
					return map[string]interface{}{"staticBytes": [5]byte{1, 2, 3, 4, 5}}
				},
				fields: Arguments{Argument{
					Name:    "staticBytes",
					Type:    bytesType,
					Indexed: true,
				}},
				topics: []common.Hash{
					{1, 2, 3, 4, 5},
				},
			},
			wantErr: false,
		},
		{
			name: "int8 with negative value",
			args: args{
				createObj: func() interface{} { return &int8Struct{} },
				resultObj: func() interface{} { return &int8Struct{Int8Value: -1} },
				resultMap: func() map[string]interface{} {
					return map[string]interface{}{"int8Value": int8(-1)}
				},
				fields: Arguments{Argument{
					Name:    "int8Value",
					Type:    int8Type,
					Indexed: true,
				}},
				topics: []common.Hash{
					{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255},
				},
			},
			wantErr: false,
		},
		{
			name: "int256 with negative value",
			args: args{
				createObj: func() interface{} { return &int256Struct{} },
				resultObj: func() interface{} { return &int256Struct{Int256Value: big.NewInt(-1)} },
				resultMap: func() map[string]interface{} {
					return map[string]interface{}{"int256Value": big.NewInt(-1)}
				},
				fields: Arguments{Argument{
					Name:    "int256Value",
					Type:    int256Type,
					Indexed: true,
				}},
				topics: []common.Hash{
					{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255},
				},
			},
			wantErr: false,
		},
		{
			name: "hash type",
			args: args{
				createObj: func() interface{} { return &hashStruct{} },
				resultObj: func() interface{} { return &hashStruct{crypto.Keccak256Hash([]byte("stringtopic"))} },
				resultMap: func() map[string]interface{} {
					return map[string]interface{}{"hashValue": crypto.Keccak256Hash([]byte("stringtopic"))}
				},
				fields: Arguments{Argument{
					Name:    "hashValue",
					Type:    stringType,
					Indexed: true,
				}},
				topics: []common.Hash{
					crypto.Keccak256Hash([]byte("stringtopic")),
				},
			},
			wantErr: false,
		},
		{
			name: "function type",
			args: args{
				createObj: func() interface{} { return &funcStruct{} },
				resultObj: func() interface{} {
					return &funcStruct{[24]byte{255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}}
				},
				resultMap: func() map[string]interface{} {
					return map[string]interface{}{"funcValue": [24]byte{255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}}
				},
				fields: Arguments{Argument{
					Name:    "funcValue",
					Type:    funcType,
					Indexed: true,
				}},
				topics: []common.Hash{
					{0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255},
				},
			},
			wantErr: false,
		},
		{
			name: "error on topic/field count mismatch",
			args: args{
				createObj: func() interface{} { return nil },
				resultObj: func() interface{} { return nil },
				resultMap: func() map[string]interface{} { return make(map[string]interface{}) },
				fields: Arguments{Argument{
					Name:    "tupletype",
					Type:    tupleType,
					Indexed: true,
				}},
				topics: []common.Hash{},
			},
			wantErr: true,
		},
		{
			name: "error on unindexed arguments",
			args: args{
				createObj: func() interface{} { return &int256Struct{} },
				resultObj: func() interface{} { return &int256Struct{} },
				resultMap: func() map[string]interface{} { return make(map[string]interface{}) },
				fields: Arguments{Argument{
					Name:    "int256Value",
					Type:    int256Type,
					Indexed: false,
				}},
				topics: []common.Hash{
					{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255},
				},
			},
			wantErr: true,
		},
		{
			name: "error on tuple in topic reconstruction",
			args: args{
				createObj: func() interface{} { return &tupleType },
				resultObj: func() interface{} { return &tupleType },
				resultMap: func() map[string]interface{} { return make(map[string]interface{}) },
				fields: Arguments{Argument{
					Name:    "tupletype",
					Type:    tupleType,
					Indexed: true,
				}},
				topics: []common.Hash{{0}},
			},
			wantErr: true,
		},
		{
			name: "error on improper encoded function",
			args: args{
				createObj: func() interface{} { return &funcStruct{} },
				resultObj: func() interface{} { return &funcStruct{} },
				resultMap: func() map[string]interface{} {
					return make(map[string]interface{})
				},
				fields: Arguments{Argument{
					Name:    "funcValue",
					Type:    funcType,
					Indexed: true,
				}},
				topics: []common.Hash{
					{0, 0, 0, 0, 0, 0, 0, 128, 255, 255, 255, 255, 255, 255, 255, 255,
						255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255},
				},
			},
			wantErr: true,
		},
	}

	return tests
}

func TestParseTopics(t *testing.T) {
	tests := setupTopicsTests()

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			createObj := tt.args.createObj()
			if err := ParseTopics(createObj, tt.args.fields, tt.args.topics); (err != nil) != tt.wantErr {
				t.Errorf("parseTopics() error = %v, wantErr %v", err, tt.wantErr)
			}
			resultObj := tt.args.resultObj()
			if !reflect.DeepEqual(createObj, resultObj) {
				t.Errorf("parseTopics() = %v, want %v", createObj, resultObj)
			}
		})
	}
}

func TestParseTopicsIntoMap(t *testing.T) {
	tests := setupTopicsTests()

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			outMap := make(map[string]interface{})
			if err := ParseTopicsIntoMap(outMap, tt.args.fields, tt.args.topics); (err != nil) != tt.wantErr {
				t.Errorf("parseTopicsIntoMap() error = %v, wantErr %v", err, tt.wantErr)
			}
			resultMap := tt.args.resultMap()
			if !reflect.DeepEqual(outMap, resultMap) {
				t.Errorf("parseTopicsIntoMap() = %v, want %v", outMap, resultMap)
			}
		})
	}
}
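As a quick orientation to the API exercised above, here is a minimal, hedged sketch of decoding a single indexed int256 topic from outside the package. It assumes the exported helpers shown in the tests (NewType, ParseTopicsIntoMap, Arguments) live in github.com/ethereum/go-ethereum/accounts/abi at this revision; it is an illustration, not code from the diff.

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

func main() {
	// Decode one indexed int256 topic into a map, mirroring the
	// "int256 with negative value" case in the tests above.
	int256Type, _ := abi.NewType("int256", "", nil)
	fields := abi.Arguments{{Name: "int256Value", Type: int256Type, Indexed: true}}

	// 32 bytes of 0xff is the two's-complement encoding of -1.
	topic := common.HexToHash("0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")

	out := make(map[string]interface{})
	if err := abi.ParseTopicsIntoMap(out, fields, []common.Hash{topic}); err != nil {
		panic(err)
	}
	fmt.Println(out["int256Value"].(*big.Int)) // expected to print -1
}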
@@ -23,6 +23,8 @@ import (
 	"regexp"
 	"strconv"
 	"strings"
+
+	"github.com/ethereum/go-ethereum/common"
 )
 
 // Type enumerator
@@ -42,20 +44,19 @@ const (
 	FunctionTy
 )
 
-// Type is the reflection of the supported argument type
+// Type is the reflection of the supported argument type.
 type Type struct {
 	Elem *Type
-	Kind reflect.Kind
-	Type reflect.Type
 	Size int
 	T    byte // Our own type checking
 
 	stringKind string // holds the unparsed string for deriving signatures
 
 	// Tuple relative fields
 	TupleRawName  string       // Raw struct name defined in source code, may be empty.
 	TupleElems    []*Type      // Type information of all tuple fields
 	TupleRawNames []string     // Raw field name of all tuple fields
+	TupleType     reflect.Type // Underlying struct of the tuple
 }
 
 var (
@@ -94,20 +95,16 @@ func NewType(t string, internalType string, components []ArgumentMarshaling) (ty
 		if len(intz) == 0 {
 			// is a slice
 			typ.T = SliceTy
-			typ.Kind = reflect.Slice
 			typ.Elem = &embeddedType
-			typ.Type = reflect.SliceOf(embeddedType.Type)
 			typ.stringKind = embeddedType.stringKind + sliced
 		} else if len(intz) == 1 {
-			// is a array
+			// is an array
 			typ.T = ArrayTy
-			typ.Kind = reflect.Array
 			typ.Elem = &embeddedType
 			typ.Size, err = strconv.Atoi(intz[0])
 			if err != nil {
 				return Type{}, fmt.Errorf("abi: error parsing variable size: %v", err)
 			}
-			typ.Type = reflect.ArrayOf(typ.Size, embeddedType.Type)
 			typ.stringKind = embeddedType.stringKind + sliced
 		} else {
 			return Type{}, fmt.Errorf("invalid formatting of array type")
@@ -139,36 +136,24 @@ func NewType(t string, internalType string, components []ArgumentMarshaling) (ty
 	// varType is the parsed abi type
 	switch varType := parsedType[1]; varType {
 	case "int":
-		typ.Kind, typ.Type = reflectIntKindAndType(false, varSize)
 		typ.Size = varSize
 		typ.T = IntTy
 	case "uint":
-		typ.Kind, typ.Type = reflectIntKindAndType(true, varSize)
 		typ.Size = varSize
 		typ.T = UintTy
 	case "bool":
-		typ.Kind = reflect.Bool
 		typ.T = BoolTy
-		typ.Type = reflect.TypeOf(bool(false))
 	case "address":
-		typ.Kind = reflect.Array
-		typ.Type = addressT
 		typ.Size = 20
 		typ.T = AddressTy
 	case "string":
-		typ.Kind = reflect.String
-		typ.Type = reflect.TypeOf("")
 		typ.T = StringTy
 	case "bytes":
 		if varSize == 0 {
 			typ.T = BytesTy
-			typ.Kind = reflect.Slice
-			typ.Type = reflect.SliceOf(reflect.TypeOf(byte(0)))
 		} else {
 			typ.T = FixedBytesTy
-			typ.Kind = reflect.Array
 			typ.Size = varSize
-			typ.Type = reflect.ArrayOf(varSize, reflect.TypeOf(byte(0)))
 		}
 	case "tuple":
 		var (
@@ -178,17 +163,20 @@ func NewType(t string, internalType string, components []ArgumentMarshaling) (ty
 			expression string // canonical parameter expression
 		)
 		expression += "("
+		overloadedNames := make(map[string]string)
 		for idx, c := range components {
 			cType, err := NewType(c.Type, c.InternalType, c.Components)
 			if err != nil {
 				return Type{}, err
 			}
-			if ToCamelCase(c.Name) == "" {
-				return Type{}, errors.New("abi: purely anonymous or underscored field is not supported")
+			fieldName, err := overloadedArgName(c.Name, overloadedNames)
+			if err != nil {
+				return Type{}, err
 			}
+			overloadedNames[fieldName] = fieldName
 			fields = append(fields, reflect.StructField{
-				Name: ToCamelCase(c.Name), // reflect.StructOf will panic for any exported field.
-				Type: cType.Type,
+				Name: fieldName, // reflect.StructOf will panic for any exported field.
+				Type: cType.GetType(),
 				Tag:  reflect.StructTag("json:\"" + c.Name + "\""),
 			})
 			elems = append(elems, &cType)
@@ -199,8 +187,8 @@ func NewType(t string, internalType string, components []ArgumentMarshaling) (ty
 			}
 		}
 		expression += ")"
-		typ.Kind = reflect.Struct
-		typ.Type = reflect.StructOf(fields)
+		typ.TupleType = reflect.StructOf(fields)
 		typ.TupleElems = elems
 		typ.TupleRawNames = names
 		typ.T = TupleTy
@@ -217,10 +205,8 @@ func NewType(t string, internalType string, components []ArgumentMarshaling) (ty
 		}
 
 	case "function":
-		typ.Kind = reflect.Array
 		typ.T = FunctionTy
 		typ.Size = 24
-		typ.Type = reflect.ArrayOf(24, reflect.TypeOf(byte(0)))
 	default:
 		return Type{}, fmt.Errorf("unsupported arg type: %s", t)
 	}
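One behavioral consequence of the NewType change above: tuple components whose camel-cased names collide are no longer rejected; overloadedArgName (defined in the next hunk) suffixes later occurrences with an index. A rough, hedged sketch of the resulting Go struct field names, assuming the accounts/abi import path and that GetType/ArgumentMarshaling behave as shown in this diff:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	// "value" and "_value" should both camel-case to "Value"; the second
	// occurrence is expected to be renamed to "Value0" by the overload handling.
	tuple, err := abi.NewType("tuple", "", []abi.ArgumentMarshaling{
		{Name: "value", Type: "uint256"},
		{Name: "_value", Type: "uint256"},
	})
	if err != nil {
		panic(err)
	}
	st := tuple.GetType() // underlying Go struct built via reflect.StructOf
	for i := 0; i < st.NumField(); i++ {
		fmt.Println(st.Field(i).Name) // expected: Value, Value0
	}
}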
@@ -228,7 +214,57 @@ func NewType(t string, internalType string, components []ArgumentMarshaling) (ty
 	return
 }
 
-// String implements Stringer
+// GetType returns the reflection type of the ABI type.
+func (t Type) GetType() reflect.Type {
+	switch t.T {
+	case IntTy:
+		return reflectIntType(false, t.Size)
+	case UintTy:
+		return reflectIntType(true, t.Size)
+	case BoolTy:
+		return reflect.TypeOf(false)
+	case StringTy:
+		return reflect.TypeOf("")
+	case SliceTy:
+		return reflect.SliceOf(t.Elem.GetType())
+	case ArrayTy:
+		return reflect.ArrayOf(t.Size, t.Elem.GetType())
+	case TupleTy:
+		return t.TupleType
+	case AddressTy:
+		return reflect.TypeOf(common.Address{})
+	case FixedBytesTy:
+		return reflect.ArrayOf(t.Size, reflect.TypeOf(byte(0)))
+	case BytesTy:
+		return reflect.SliceOf(reflect.TypeOf(byte(0)))
+	case HashTy:
+		// hashtype currently not used
+		return reflect.ArrayOf(32, reflect.TypeOf(byte(0)))
+	case FixedPointTy:
+		// fixedpoint type currently not used
+		return reflect.ArrayOf(32, reflect.TypeOf(byte(0)))
+	case FunctionTy:
+		return reflect.ArrayOf(24, reflect.TypeOf(byte(0)))
+	default:
+		panic("Invalid type")
+	}
+}
+
+func overloadedArgName(rawName string, names map[string]string) (string, error) {
+	fieldName := ToCamelCase(rawName)
+	if fieldName == "" {
+		return "", errors.New("abi: purely anonymous or underscored field is not supported")
+	}
+	// Handle overloaded fieldNames
+	_, ok := names[fieldName]
+	for idx := 0; ok; idx++ {
+		fieldName = fmt.Sprintf("%s%d", ToCamelCase(rawName), idx)
+		_, ok = names[fieldName]
+	}
+	return fieldName, nil
+}
+
+// String implements Stringer.
 func (t Type) String() (out string) {
 	return t.stringKind
 }
@@ -310,7 +346,7 @@ func (t Type) pack(v reflect.Value) ([]byte, error) {
 		return append(ret, tail...), nil
 
 	default:
-		return packElement(t, v), nil
+		return packElement(t, v)
 	}
 }
 
@@ -350,7 +386,7 @@ func isDynamicType(t Type) bool {
 func getTypeSize(t Type) int {
 	if t.T == ArrayTy && !isDynamicType(*t.Elem) {
 		// Recursively calculate type size if it is a nested array
-		if t.Elem.T == ArrayTy {
+		if t.Elem.T == ArrayTy || t.Elem.T == TupleTy {
 			return t.Size * getTypeSize(*t.Elem)
 		}
 		return t.Size * 32
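With Kind and Type gone from the struct, the Go reflection type is now derived on demand through GetType, as defined in the hunk above. A small, hedged sketch of what that gives back for a few parsed ABI types (assumes the accounts/abi import path at this revision):

package main

import (
	"fmt"
	"reflect"

	"github.com/ethereum/go-ethereum/accounts/abi"
)

func main() {
	for _, s := range []string{"uint64", "uint256", "bytes32", "address[2]"} {
		typ, err := abi.NewType(s, "", nil)
		if err != nil {
			panic(err)
		}
		// GetType reconstructs the reflect.Type that used to be cached on the
		// struct; reflect.New(...) is how the unpacker allocates a destination.
		fmt.Printf("%-12s -> %s\n", s, reflect.New(typ.GetType()).Elem().Type())
	}
	// Expected (roughly): uint64 -> uint64, uint256 -> *big.Int,
	// bytes32 -> [32]uint8, address[2] -> [2]common.Address
}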
@@ -36,58 +36,58 @@ func TestTypeRegexp(t *testing.T) {
 		components []ArgumentMarshaling
 		kind       Type
 	}{
-		{"bool", nil, Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}},
+		{"bool", nil, Type{T: BoolTy, stringKind: "bool"}},
-		{"bool[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]bool(nil)), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[]"}},
+		{"bool[]", nil, Type{T: SliceTy, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[]"}},
-		{"bool[2]", nil, Type{Size: 2, Kind: reflect.Array, T: ArrayTy, Type: reflect.TypeOf([2]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[2]"}},
+		{"bool[2]", nil, Type{Size: 2, T: ArrayTy, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[2]"}},
-		{"bool[2][]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([][2]bool{}), Elem: &Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][]"}},
+		{"bool[2][]", nil, Type{T: SliceTy, Elem: &Type{T: ArrayTy, Size: 2, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][]"}},
-		{"bool[][]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([][]bool{}), Elem: &Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][]"}},
+		{"bool[][]", nil, Type{T: SliceTy, Elem: &Type{T: SliceTy, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][]"}},
-		{"bool[][2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][]bool{}), Elem: &Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][2]"}},
+		{"bool[][2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: SliceTy, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][2]"}},
-		{"bool[2][2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][2]bool{}), Elem: &Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][2]"}},
+		{"bool[2][2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: ArrayTy, Size: 2, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][2]"}},
-		{"bool[2][][2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][][2]bool{}), Elem: &Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([][2]bool{}), Elem: &Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][]"}, stringKind: "bool[2][][2]"}},
+		{"bool[2][][2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: SliceTy, Elem: &Type{T: ArrayTy, Size: 2, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][]"}, stringKind: "bool[2][][2]"}},
-		{"bool[2][2][2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][2][2]bool{}), Elem: &Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][2]bool{}), Elem: &Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][2]"}, stringKind: "bool[2][2][2]"}},
+		{"bool[2][2][2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: ArrayTy, Size: 2, Elem: &Type{T: ArrayTy, Size: 2, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[2]"}, stringKind: "bool[2][2]"}, stringKind: "bool[2][2][2]"}},
-		{"bool[][][]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([][][]bool{}), Elem: &Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([][]bool{}), Elem: &Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][]"}, stringKind: "bool[][][]"}},
+		{"bool[][][]", nil, Type{T: SliceTy, Elem: &Type{T: SliceTy, Elem: &Type{T: SliceTy, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][]"}, stringKind: "bool[][][]"}},
-		{"bool[][2][]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([][2][]bool{}), Elem: &Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][]bool{}), Elem: &Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]bool{}), Elem: &Type{Kind: reflect.Bool, T: BoolTy, Type: reflect.TypeOf(bool(false)), stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][2]"}, stringKind: "bool[][2][]"}},
+		{"bool[][2][]", nil, Type{T: SliceTy, Elem: &Type{T: ArrayTy, Size: 2, Elem: &Type{T: SliceTy, Elem: &Type{T: BoolTy, stringKind: "bool"}, stringKind: "bool[]"}, stringKind: "bool[][2]"}, stringKind: "bool[][2][]"}},
-		{"int8", nil, Type{Kind: reflect.Int8, Type: int8T, Size: 8, T: IntTy, stringKind: "int8"}},
+		{"int8", nil, Type{Size: 8, T: IntTy, stringKind: "int8"}},
-		{"int16", nil, Type{Kind: reflect.Int16, Type: int16T, Size: 16, T: IntTy, stringKind: "int16"}},
+		{"int16", nil, Type{Size: 16, T: IntTy, stringKind: "int16"}},
-		{"int32", nil, Type{Kind: reflect.Int32, Type: int32T, Size: 32, T: IntTy, stringKind: "int32"}},
+		{"int32", nil, Type{Size: 32, T: IntTy, stringKind: "int32"}},
-		{"int64", nil, Type{Kind: reflect.Int64, Type: int64T, Size: 64, T: IntTy, stringKind: "int64"}},
+		{"int64", nil, Type{Size: 64, T: IntTy, stringKind: "int64"}},
-		{"int256", nil, Type{Kind: reflect.Ptr, Type: bigT, Size: 256, T: IntTy, stringKind: "int256"}},
+		{"int256", nil, Type{Size: 256, T: IntTy, stringKind: "int256"}},
-		{"int8[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]int8{}), Elem: &Type{Kind: reflect.Int8, Type: int8T, Size: 8, T: IntTy, stringKind: "int8"}, stringKind: "int8[]"}},
+		{"int8[]", nil, Type{T: SliceTy, Elem: &Type{Size: 8, T: IntTy, stringKind: "int8"}, stringKind: "int8[]"}},
-		{"int8[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]int8{}), Elem: &Type{Kind: reflect.Int8, Type: int8T, Size: 8, T: IntTy, stringKind: "int8"}, stringKind: "int8[2]"}},
+		{"int8[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 8, T: IntTy, stringKind: "int8"}, stringKind: "int8[2]"}},
-		{"int16[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]int16{}), Elem: &Type{Kind: reflect.Int16, Type: int16T, Size: 16, T: IntTy, stringKind: "int16"}, stringKind: "int16[]"}},
+		{"int16[]", nil, Type{T: SliceTy, Elem: &Type{Size: 16, T: IntTy, stringKind: "int16"}, stringKind: "int16[]"}},
-		{"int16[2]", nil, Type{Size: 2, Kind: reflect.Array, T: ArrayTy, Type: reflect.TypeOf([2]int16{}), Elem: &Type{Kind: reflect.Int16, Type: int16T, Size: 16, T: IntTy, stringKind: "int16"}, stringKind: "int16[2]"}},
+		{"int16[2]", nil, Type{Size: 2, T: ArrayTy, Elem: &Type{Size: 16, T: IntTy, stringKind: "int16"}, stringKind: "int16[2]"}},
-		{"int32[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]int32{}), Elem: &Type{Kind: reflect.Int32, Type: int32T, Size: 32, T: IntTy, stringKind: "int32"}, stringKind: "int32[]"}},
+		{"int32[]", nil, Type{T: SliceTy, Elem: &Type{Size: 32, T: IntTy, stringKind: "int32"}, stringKind: "int32[]"}},
-		{"int32[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]int32{}), Elem: &Type{Kind: reflect.Int32, Type: int32T, Size: 32, T: IntTy, stringKind: "int32"}, stringKind: "int32[2]"}},
+		{"int32[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 32, T: IntTy, stringKind: "int32"}, stringKind: "int32[2]"}},
-		{"int64[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]int64{}), Elem: &Type{Kind: reflect.Int64, Type: int64T, Size: 64, T: IntTy, stringKind: "int64"}, stringKind: "int64[]"}},
+		{"int64[]", nil, Type{T: SliceTy, Elem: &Type{Size: 64, T: IntTy, stringKind: "int64"}, stringKind: "int64[]"}},
-		{"int64[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]int64{}), Elem: &Type{Kind: reflect.Int64, Type: int64T, Size: 64, T: IntTy, stringKind: "int64"}, stringKind: "int64[2]"}},
+		{"int64[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 64, T: IntTy, stringKind: "int64"}, stringKind: "int64[2]"}},
-		{"int256[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]*big.Int{}), Elem: &Type{Kind: reflect.Ptr, Type: bigT, Size: 256, T: IntTy, stringKind: "int256"}, stringKind: "int256[]"}},
+		{"int256[]", nil, Type{T: SliceTy, Elem: &Type{Size: 256, T: IntTy, stringKind: "int256"}, stringKind: "int256[]"}},
-		{"int256[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]*big.Int{}), Elem: &Type{Kind: reflect.Ptr, Type: bigT, Size: 256, T: IntTy, stringKind: "int256"}, stringKind: "int256[2]"}},
+		{"int256[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 256, T: IntTy, stringKind: "int256"}, stringKind: "int256[2]"}},
-		{"uint8", nil, Type{Kind: reflect.Uint8, Type: uint8T, Size: 8, T: UintTy, stringKind: "uint8"}},
+		{"uint8", nil, Type{Size: 8, T: UintTy, stringKind: "uint8"}},
-		{"uint16", nil, Type{Kind: reflect.Uint16, Type: uint16T, Size: 16, T: UintTy, stringKind: "uint16"}},
+		{"uint16", nil, Type{Size: 16, T: UintTy, stringKind: "uint16"}},
-		{"uint32", nil, Type{Kind: reflect.Uint32, Type: uint32T, Size: 32, T: UintTy, stringKind: "uint32"}},
+		{"uint32", nil, Type{Size: 32, T: UintTy, stringKind: "uint32"}},
-		{"uint64", nil, Type{Kind: reflect.Uint64, Type: uint64T, Size: 64, T: UintTy, stringKind: "uint64"}},
+		{"uint64", nil, Type{Size: 64, T: UintTy, stringKind: "uint64"}},
-		{"uint256", nil, Type{Kind: reflect.Ptr, Type: bigT, Size: 256, T: UintTy, stringKind: "uint256"}},
+		{"uint256", nil, Type{Size: 256, T: UintTy, stringKind: "uint256"}},
-		{"uint8[]", nil, Type{Kind: reflect.Slice, T: SliceTy, Type: reflect.TypeOf([]uint8{}), Elem: &Type{Kind: reflect.Uint8, Type: uint8T, Size: 8, T: UintTy, stringKind: "uint8"}, stringKind: "uint8[]"}},
+		{"uint8[]", nil, Type{T: SliceTy, Elem: &Type{Size: 8, T: UintTy, stringKind: "uint8"}, stringKind: "uint8[]"}},
-		{"uint8[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]uint8{}), Elem: &Type{Kind: reflect.Uint8, Type: uint8T, Size: 8, T: UintTy, stringKind: "uint8"}, stringKind: "uint8[2]"}},
+		{"uint8[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 8, T: UintTy, stringKind: "uint8"}, stringKind: "uint8[2]"}},
-		{"uint16[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]uint16{}), Elem: &Type{Kind: reflect.Uint16, Type: uint16T, Size: 16, T: UintTy, stringKind: "uint16"}, stringKind: "uint16[]"}},
+		{"uint16[]", nil, Type{T: SliceTy, Elem: &Type{Size: 16, T: UintTy, stringKind: "uint16"}, stringKind: "uint16[]"}},
-		{"uint16[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]uint16{}), Elem: &Type{Kind: reflect.Uint16, Type: uint16T, Size: 16, T: UintTy, stringKind: "uint16"}, stringKind: "uint16[2]"}},
+		{"uint16[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 16, T: UintTy, stringKind: "uint16"}, stringKind: "uint16[2]"}},
-		{"uint32[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]uint32{}), Elem: &Type{Kind: reflect.Uint32, Type: uint32T, Size: 32, T: UintTy, stringKind: "uint32"}, stringKind: "uint32[]"}},
+		{"uint32[]", nil, Type{T: SliceTy, Elem: &Type{Size: 32, T: UintTy, stringKind: "uint32"}, stringKind: "uint32[]"}},
-		{"uint32[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]uint32{}), Elem: &Type{Kind: reflect.Uint32, Type: uint32T, Size: 32, T: UintTy, stringKind: "uint32"}, stringKind: "uint32[2]"}},
+		{"uint32[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 32, T: UintTy, stringKind: "uint32"}, stringKind: "uint32[2]"}},
-		{"uint64[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]uint64{}), Elem: &Type{Kind: reflect.Uint64, Type: uint64T, Size: 64, T: UintTy, stringKind: "uint64"}, stringKind: "uint64[]"}},
+		{"uint64[]", nil, Type{T: SliceTy, Elem: &Type{Size: 64, T: UintTy, stringKind: "uint64"}, stringKind: "uint64[]"}},
-		{"uint64[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]uint64{}), Elem: &Type{Kind: reflect.Uint64, Type: uint64T, Size: 64, T: UintTy, stringKind: "uint64"}, stringKind: "uint64[2]"}},
+		{"uint64[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 64, T: UintTy, stringKind: "uint64"}, stringKind: "uint64[2]"}},
-		{"uint256[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]*big.Int{}), Elem: &Type{Kind: reflect.Ptr, Type: bigT, Size: 256, T: UintTy, stringKind: "uint256"}, stringKind: "uint256[]"}},
+		{"uint256[]", nil, Type{T: SliceTy, Elem: &Type{Size: 256, T: UintTy, stringKind: "uint256"}, stringKind: "uint256[]"}},
-		{"uint256[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Type: reflect.TypeOf([2]*big.Int{}), Size: 2, Elem: &Type{Kind: reflect.Ptr, Type: bigT, Size: 256, T: UintTy, stringKind: "uint256"}, stringKind: "uint256[2]"}},
+		{"uint256[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 256, T: UintTy, stringKind: "uint256"}, stringKind: "uint256[2]"}},
-		{"bytes32", nil, Type{Kind: reflect.Array, T: FixedBytesTy, Size: 32, Type: reflect.TypeOf([32]byte{}), stringKind: "bytes32"}},
+		{"bytes32", nil, Type{T: FixedBytesTy, Size: 32, stringKind: "bytes32"}},
-		{"bytes[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([][]byte{}), Elem: &Type{Kind: reflect.Slice, Type: reflect.TypeOf([]byte{}), T: BytesTy, stringKind: "bytes"}, stringKind: "bytes[]"}},
+		{"bytes[]", nil, Type{T: SliceTy, Elem: &Type{T: BytesTy, stringKind: "bytes"}, stringKind: "bytes[]"}},
-		{"bytes[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][]byte{}), Elem: &Type{T: BytesTy, Type: reflect.TypeOf([]byte{}), Kind: reflect.Slice, stringKind: "bytes"}, stringKind: "bytes[2]"}},
+		{"bytes[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: BytesTy, stringKind: "bytes"}, stringKind: "bytes[2]"}},
-		{"bytes32[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([][32]byte{}), Elem: &Type{Kind: reflect.Array, Type: reflect.TypeOf([32]byte{}), T: FixedBytesTy, Size: 32, stringKind: "bytes32"}, stringKind: "bytes32[]"}},
+		{"bytes32[]", nil, Type{T: SliceTy, Elem: &Type{T: FixedBytesTy, Size: 32, stringKind: "bytes32"}, stringKind: "bytes32[]"}},
-		{"bytes32[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2][32]byte{}), Elem: &Type{Kind: reflect.Array, T: FixedBytesTy, Size: 32, Type: reflect.TypeOf([32]byte{}), stringKind: "bytes32"}, stringKind: "bytes32[2]"}},
+		{"bytes32[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: FixedBytesTy, Size: 32, stringKind: "bytes32"}, stringKind: "bytes32[2]"}},
-		{"string", nil, Type{Kind: reflect.String, T: StringTy, Type: reflect.TypeOf(""), stringKind: "string"}},
+		{"string", nil, Type{T: StringTy, stringKind: "string"}},
-		{"string[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]string{}), Elem: &Type{Kind: reflect.String, Type: reflect.TypeOf(""), T: StringTy, stringKind: "string"}, stringKind: "string[]"}},
+		{"string[]", nil, Type{T: SliceTy, Elem: &Type{T: StringTy, stringKind: "string"}, stringKind: "string[]"}},
-		{"string[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]string{}), Elem: &Type{Kind: reflect.String, T: StringTy, Type: reflect.TypeOf(""), stringKind: "string"}, stringKind: "string[2]"}},
+		{"string[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{T: StringTy, stringKind: "string"}, stringKind: "string[2]"}},
-		{"address", nil, Type{Kind: reflect.Array, Type: addressT, Size: 20, T: AddressTy, stringKind: "address"}},
+		{"address", nil, Type{Size: 20, T: AddressTy, stringKind: "address"}},
-		{"address[]", nil, Type{T: SliceTy, Kind: reflect.Slice, Type: reflect.TypeOf([]common.Address{}), Elem: &Type{Kind: reflect.Array, Type: addressT, Size: 20, T: AddressTy, stringKind: "address"}, stringKind: "address[]"}},
+		{"address[]", nil, Type{T: SliceTy, Elem: &Type{Size: 20, T: AddressTy, stringKind: "address"}, stringKind: "address[]"}},
-		{"address[2]", nil, Type{Kind: reflect.Array, T: ArrayTy, Size: 2, Type: reflect.TypeOf([2]common.Address{}), Elem: &Type{Kind: reflect.Array, Type: addressT, Size: 20, T: AddressTy, stringKind: "address"}, stringKind: "address[2]"}},
+		{"address[2]", nil, Type{T: ArrayTy, Size: 2, Elem: &Type{Size: 20, T: AddressTy, stringKind: "address"}, stringKind: "address[2]"}},
 		// TODO when fixed types are implemented properly
 		// {"fixed", nil, Type{}},
 		// {"fixed128x128", nil, Type{}},
@@ -95,14 +95,14 @@ func TestTypeRegexp(t *testing.T) {
 		// {"fixed[2]", nil, Type{}},
 		// {"fixed128x128[]", nil, Type{}},
 		// {"fixed128x128[2]", nil, Type{}},
-		{"tuple", []ArgumentMarshaling{{Name: "a", Type: "int64"}}, Type{Kind: reflect.Struct, T: TupleTy, Type: reflect.TypeOf(struct {
+		{"tuple", []ArgumentMarshaling{{Name: "a", Type: "int64"}}, Type{T: TupleTy, TupleType: reflect.TypeOf(struct {
 			A int64 `json:"a"`
 		}{}), stringKind: "(int64)",
-			TupleElems: []*Type{{Kind: reflect.Int64, T: IntTy, Type: reflect.TypeOf(int64(0)), Size: 64, stringKind: "int64"}}, TupleRawNames: []string{"a"}}},
+			TupleElems: []*Type{{T: IntTy, Size: 64, stringKind: "int64"}}, TupleRawNames: []string{"a"}}},
-		{"tuple with long name", []ArgumentMarshaling{{Name: "aTypicalParamName", Type: "int64"}}, Type{Kind: reflect.Struct, T: TupleTy, Type: reflect.TypeOf(struct {
+		{"tuple with long name", []ArgumentMarshaling{{Name: "aTypicalParamName", Type: "int64"}}, Type{T: TupleTy, TupleType: reflect.TypeOf(struct {
 			ATypicalParamName int64 `json:"aTypicalParamName"`
 		}{}), stringKind: "(int64)",
-			TupleElems: []*Type{{Kind: reflect.Int64, T: IntTy, Type: reflect.TypeOf(int64(0)), Size: 64, stringKind: "int64"}}, TupleRawNames: []string{"aTypicalParamName"}}},
+			TupleElems: []*Type{{T: IntTy, Size: 64, stringKind: "int64"}}, TupleRawNames: []string{"aTypicalParamName"}}},
 	}
 
 	for _, tt := range tests {
@@ -255,7 +255,7 @@ func TestTypeCheck(t *testing.T) {
 		{"bytes", nil, [2]byte{0, 1}, "abi: cannot use array as type slice as argument"},
 		{"bytes", nil, common.Hash{1}, "abi: cannot use array as type slice as argument"},
 		{"string", nil, "hello world", ""},
-		{"string", nil, string(""), ""},
+		{"string", nil, "", ""},
 		{"string", nil, []byte{}, "abi: cannot use slice as type string as argument"},
 		{"bytes32[]", nil, [][32]byte{{}}, ""},
 		{"function", nil, [24]byte{}, ""},
@@ -306,3 +306,63 @@ func TestTypeCheck(t *testing.T) {
 		}
 	}
 }
+
+func TestInternalType(t *testing.T) {
+	components := []ArgumentMarshaling{{Name: "a", Type: "int64"}}
+	internalType := "struct a.b[]"
+	kind := Type{
+		T: TupleTy,
+		TupleType: reflect.TypeOf(struct {
+			A int64 `json:"a"`
+		}{}),
+		stringKind:    "(int64)",
+		TupleRawName:  "ab[]",
+		TupleElems:    []*Type{{T: IntTy, Size: 64, stringKind: "int64"}},
+		TupleRawNames: []string{"a"},
+	}
+
+	blob := "tuple"
+	typ, err := NewType(blob, internalType, components)
+	if err != nil {
+		t.Errorf("type %q: failed to parse type string: %v", blob, err)
+	}
+	if !reflect.DeepEqual(typ, kind) {
+		t.Errorf("type %q: parsed type mismatch:\nGOT %s\nWANT %s ", blob, spew.Sdump(typeWithoutStringer(typ)), spew.Sdump(typeWithoutStringer(kind)))
+	}
+}
+
+func TestGetTypeSize(t *testing.T) {
+	var testCases = []struct {
+		typ        string
+		components []ArgumentMarshaling
+		typSize    int
+	}{
+		// simple array
+		{"uint256[2]", nil, 32 * 2},
+		{"address[3]", nil, 32 * 3},
+		{"bytes32[4]", nil, 32 * 4},
+		// array array
+		{"uint256[2][3][4]", nil, 32 * (2 * 3 * 4)},
+		// array tuple
+		{"tuple[2]", []ArgumentMarshaling{{Name: "x", Type: "bytes32"}, {Name: "y", Type: "bytes32"}}, (32 * 2) * 2},
+		// simple tuple
+		{"tuple", []ArgumentMarshaling{{Name: "x", Type: "uint256"}, {Name: "y", Type: "uint256"}}, 32 * 2},
+		// tuple array
+		{"tuple", []ArgumentMarshaling{{Name: "x", Type: "bytes32[2]"}}, 32 * 2},
+		// tuple tuple
+		{"tuple", []ArgumentMarshaling{{Name: "x", Type: "tuple", Components: []ArgumentMarshaling{{Name: "x", Type: "bytes32"}}}}, 32},
+		{"tuple", []ArgumentMarshaling{{Name: "x", Type: "tuple", Components: []ArgumentMarshaling{{Name: "x", Type: "bytes32[2]"}, {Name: "y", Type: "uint256"}}}}, 32 * (2 + 1)},
+	}
+
+	for i, data := range testCases {
+		typ, err := NewType(data.typ, "", data.components)
+		if err != nil {
+			t.Errorf("type %q: failed to parse type string: %v", data.typ, err)
+		}
+
+		result := getTypeSize(typ)
+		if result != data.typSize {
+			t.Errorf("case %d type %q: get type size error: actual: %d expected: %d", i, data.typ, result, data.typSize)
+		}
+	}
+}
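The TupleTy branch added to getTypeSize is what makes the tuple[2] case in TestGetTypeSize come out to 128: each array element is itself a static tuple occupying 2 fields * 32 bytes, so the array head takes 2 * 64 bytes. A hypothetical in-package sketch (not part of the diff; getTypeSize is unexported, so this only compiles inside accounts/abi, with fmt imported):

// Hypothetical example in the same package as the tests above.
func ExampleGetTypeSizeForTupleArray() {
	components := []ArgumentMarshaling{
		{Name: "x", Type: "bytes32"},
		{Name: "y", Type: "bytes32"},
	}
	typ, _ := NewType("tuple[2]", "", components)
	// 2 array elements, each a static tuple of 2 * 32 bytes.
	fmt.Println(getTypeSize(typ))
	// Output: 128
}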
@@ -26,52 +26,54 @@ import (
 )
 
 var (
-	maxUint256 = big.NewInt(0).Add(
+	// MaxUint256 is the maximum value that can be represented by a uint256.
-		big.NewInt(0).Exp(big.NewInt(2), big.NewInt(256), nil),
+	MaxUint256 = new(big.Int).Sub(new(big.Int).Lsh(common.Big1, 256), common.Big1)
-		big.NewInt(-1))
+	// MaxInt256 is the maximum value that can be represented by a int256.
-	maxInt256 = big.NewInt(0).Add(
+	MaxInt256 = new(big.Int).Sub(new(big.Int).Lsh(common.Big1, 255), common.Big1)
-		big.NewInt(0).Exp(big.NewInt(2), big.NewInt(255), nil),
-		big.NewInt(-1))
 )
 
-// reads the integer based on its kind
+// ReadInteger reads the integer based on its kind and returns the appropriate value.
-func readInteger(typ byte, kind reflect.Kind, b []byte) interface{} {
+func ReadInteger(typ Type, b []byte) interface{} {
-	switch kind {
+	if typ.T == UintTy {
-	case reflect.Uint8:
+		switch typ.Size {
-		return b[len(b)-1]
+		case 8:
-	case reflect.Uint16:
+			return b[len(b)-1]
-		return binary.BigEndian.Uint16(b[len(b)-2:])
+		case 16:
-	case reflect.Uint32:
+			return binary.BigEndian.Uint16(b[len(b)-2:])
-		return binary.BigEndian.Uint32(b[len(b)-4:])
+		case 32:
-	case reflect.Uint64:
+			return binary.BigEndian.Uint32(b[len(b)-4:])
-		return binary.BigEndian.Uint64(b[len(b)-8:])
+		case 64:
-	case reflect.Int8:
+			return binary.BigEndian.Uint64(b[len(b)-8:])
+		default:
+			// the only case left for unsigned integer is uint256.
+			return new(big.Int).SetBytes(b)
+		}
+	}
+	switch typ.Size {
+	case 8:
 		return int8(b[len(b)-1])
-	case reflect.Int16:
+	case 16:
 		return int16(binary.BigEndian.Uint16(b[len(b)-2:]))
-	case reflect.Int32:
+	case 32:
 		return int32(binary.BigEndian.Uint32(b[len(b)-4:]))
-	case reflect.Int64:
+	case 64:
 		return int64(binary.BigEndian.Uint64(b[len(b)-8:]))
 	default:
-		// the only case lefts for integer is int256/uint256.
-		// big.SetBytes can't tell if a number is negative, positive on itself.
+		// the only case left for integer is int256
+		// big.SetBytes can't tell if a number is negative or positive in itself.
 		// On EVM, if the returned number > max int256, it is negative.
+		// A number is > max int256 if the bit at position 255 is set.
 		ret := new(big.Int).SetBytes(b)
-		if typ == UintTy {
-			return ret
-		}
-
-		if ret.Cmp(maxInt256) > 0 {
-			ret.Add(maxUint256, big.NewInt(0).Neg(ret))
-			ret.Add(ret, big.NewInt(1))
+		if ret.Bit(255) == 1 {
+			ret.Add(MaxUint256, new(big.Int).Neg(ret))
+			ret.Add(ret, common.Big1)
 			ret.Neg(ret)
 		}
 		return ret
 	}
 }
 
-// reads a bool
+// readBool reads a bool.
 func readBool(word []byte) (bool, error) {
 	for _, b := range word[:31] {
 		if b != 0 {
@@ -89,7 +91,8 @@ func readBool(word []byte) (bool, error) {
 }
 
 // A function type is simply the address with the function selection signature at the end.
-// This enforces that standard by always presenting it as a 24-array (address + sig = 24 bytes)
+//
+// readFunctionType enforces that standard by always presenting it as a 24-array (address + sig = 24 bytes)
 func readFunctionType(t Type, word []byte) (funcTy [24]byte, err error) {
 	if t.T != FunctionTy {
 		return [24]byte{}, fmt.Errorf("abi: invalid type in call to make function type byte array")
@@ -102,20 +105,20 @@ func readFunctionType(t Type, word []byte) (funcTy [24]byte, err error) {
 	return
 }
 
-// through reflection, creates a fixed array to be read from
-func readFixedBytes(t Type, word []byte) (interface{}, error) {
+// ReadFixedBytes uses reflection to create a fixed array to be read from.
+func ReadFixedBytes(t Type, word []byte) (interface{}, error) {
 	if t.T != FixedBytesTy {
 		return nil, fmt.Errorf("abi: invalid type in call to make fixed byte array")
 	}
 	// convert
-	array := reflect.New(t.Type).Elem()
+	array := reflect.New(t.GetType()).Elem()
 
 	reflect.Copy(array, reflect.ValueOf(word[0:t.Size]))
 	return array.Interface(), nil
 
 }
 
-// iteratively unpack elements
+// forEachUnpack iteratively unpack elements.
 func forEachUnpack(t Type, output []byte, start, size int) (interface{}, error) {
 	if size < 0 {
 		return nil, fmt.Errorf("cannot marshal input to array, size is negative (%d)", size)
@@ -129,10 +132,10 @@ func forEachUnpack(t Type, output []byte, start, size int) (interface{}, error)
 
 	if t.T == SliceTy {
 		// declare our slice
-		refSlice = reflect.MakeSlice(t.Type, size, size)
+		refSlice = reflect.MakeSlice(t.GetType(), size, size)
 	} else if t.T == ArrayTy {
 		// declare our array
-		refSlice = reflect.New(t.Type).Elem()
+		refSlice = reflect.New(t.GetType()).Elem()
 	} else {
 		return nil, fmt.Errorf("abi: invalid type in array/slice unpacking stage")
 	}
@@ -156,7 +159,7 @@ func forEachUnpack(t Type, output []byte, start, size int) (interface{}, error)
 }
 
 func forTupleUnpack(t Type, output []byte) (interface{}, error) {
-	retval := reflect.New(t.Type).Elem()
+	retval := reflect.New(t.GetType()).Elem()
 	virtualArgs := 0
 	for index, elem := range t.TupleElems {
 		marshalledValue, err := toGoType((index+virtualArgs)*32, *elem, output)
@@ -216,21 +219,23 @@ func toGoType(index int, t Type, output []byte) (interface{}, error) {
 			return nil, err
 		}
 		return forTupleUnpack(t, output[begin:])
-		} else {
-			return forTupleUnpack(t, output[index:])
 		}
+		return forTupleUnpack(t, output[index:])
 	case SliceTy:
 		return forEachUnpack(t, output[begin:], 0, length)
 	case ArrayTy:
 		if isDynamicType(*t.Elem) {
-			offset := int64(binary.BigEndian.Uint64(returnOutput[len(returnOutput)-8:]))
+			offset := binary.BigEndian.Uint64(returnOutput[len(returnOutput)-8:])
+			if offset > uint64(len(output)) {
+				return nil, fmt.Errorf("abi: toGoType offset greater than output length: offset: %d, len(output): %d", offset, len(output))
+			}
 			return forEachUnpack(t, output[offset:], 0, t.Size)
 		}
 		return forEachUnpack(t, output[index:], 0, t.Size)
 	case StringTy: // variable arrays are written at the end of the return bytes
 		return string(output[begin : begin+length]), nil
 	case IntTy, UintTy:
-		return readInteger(t.T, t.Kind, returnOutput), nil
+		return ReadInteger(t, returnOutput), nil
 	case BoolTy:
 		return readBool(returnOutput)
 	case AddressTy:
@@ -240,7 +245,7 @@ func toGoType(index int, t Type, output []byte) (interface{}, error) {
 	case BytesTy:
 		return output[begin : begin+length], nil
 	case FixedBytesTy:
-		return readFixedBytes(t, returnOutput)
+		return ReadFixedBytes(t, returnOutput)
 	case FunctionTy:
 		return readFunctionType(t, returnOutput)
 	default:
@@ -248,7 +253,7 @@ func toGoType(index int, t Type, output []byte) (interface{}, error) {
 	}
 }
 
-// interprets a 32 byte slice as an offset and then determines which indice to look to decode the type.
+// lengthPrefixPointsTo interprets a 32 byte slice as an offset and then determines which indices to look to decode the type.
 func lengthPrefixPointsTo(index int, output []byte) (start int, length int, err error) {
 	bigOffsetEnd := big.NewInt(0).SetBytes(output[index : index+32])
 	bigOffsetEnd.Add(bigOffsetEnd, common.Big32)
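For orientation, the sign handling that ReadInteger now applies to int256 words can be reproduced in isolation. This is an illustrative, self-contained sketch of the same arithmetic (MaxUint256 - ret + 1, negated), not the library code itself:

package main

import (
	"fmt"
	"math/big"
)

// fromTwosComplement mirrors the int256 branch of ReadInteger above: a 32-byte
// word with bit 255 set is interpreted as a negative two's-complement number.
func fromTwosComplement(word [32]byte) *big.Int {
	maxUint256 := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 256), big.NewInt(1))
	ret := new(big.Int).SetBytes(word[:])
	if ret.Bit(255) == 1 {
		ret.Add(maxUint256, new(big.Int).Neg(ret))
		ret.Add(ret, big.NewInt(1))
		ret.Neg(ret)
	}
	return ret
}

func main() {
	var word [32]byte
	for i := range word {
		word[i] = 0xff // 0xff...ff is the encoding of -1
	}
	fmt.Println(fromTwosComplement(word)) // prints -1
}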
@@ -30,6 +30,32 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
+// TestUnpack tests the general pack/unpack tests in packing_test.go
+func TestUnpack(t *testing.T) {
+	for i, test := range packUnpackTests {
+		t.Run(strconv.Itoa(i)+" "+test.def, func(t *testing.T) {
+			//Unpack
+			def := fmt.Sprintf(`[{ "name" : "method", "type": "function", "outputs": %s}]`, test.def)
+			abi, err := JSON(strings.NewReader(def))
+			if err != nil {
+				t.Fatalf("invalid ABI definition %s: %v", def, err)
+			}
+			encb, err := hex.DecodeString(test.packed)
+			if err != nil {
+				t.Fatalf("invalid hex %s: %v", test.packed, err)
+			}
+			out, err := abi.Unpack("method", encb)
+			if err != nil {
+				t.Errorf("test %d (%v) failed: %v", i, test.def, err)
+				return
+			}
+			if !reflect.DeepEqual(test.unpacked, ConvertType(out[0], test.unpacked)) {
+				t.Errorf("test %d (%v) failed: expected %v, got %v", i, test.def, test.unpacked, out[0])
+			}
+		})
+	}
+}
+
 type unpackTest struct {
 	def string // ABI definition JSON
 	enc string // evm return data
@@ -52,16 +78,6 @@ func (test unpackTest) checkError(err error) error {
 
 var unpackTests = []unpackTest{
 	// Bools
-	{
-		def:  `[{ "type": "bool" }]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000001",
-		want: true,
-	},
-	{
-		def:  `[{ "type": "bool" }]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000000",
-		want: false,
-	},
 	{
 		def: `[{ "type": "bool" }]`,
 		enc: "0000000000000000000000000000000000000000000000000001000000000001",
@@ -75,11 +91,6 @@ var unpackTests = []unpackTest{
 		err: "abi: improperly encoded boolean value",
 	},
 	// Integers
-	{
-		def:  `[{"type": "uint32"}]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000001",
-		want: uint32(1),
-	},
 	{
 		def: `[{"type": "uint32"}]`,
 		enc: "0000000000000000000000000000000000000000000000000000000000000001",
@@ -92,16 +103,6 @@ var unpackTests = []unpackTest{
 		want: uint16(0),
 		err:  "abi: cannot unmarshal *big.Int in to uint16",
 	},
-	{
-		def:  `[{"type": "uint17"}]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000001",
-		want: big.NewInt(1),
-	},
-	{
-		def:  `[{"type": "int32"}]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000001",
-		want: int32(1),
-	},
 	{
 		def: `[{"type": "int32"}]`,
 		enc: "0000000000000000000000000000000000000000000000000000000000000001",
@@ -114,38 +115,10 @@ var unpackTests = []unpackTest{
 		want: int16(0),
 		err:  "abi: cannot unmarshal *big.Int in to int16",
 	},
-	{
-		def:  `[{"type": "int17"}]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000001",
-		want: big.NewInt(1),
-	},
-	{
-		def:  `[{"type": "int256"}]`,
-		enc:  "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-		want: big.NewInt(-1),
-	},
-	// Address
-	{
-		def:  `[{"type": "address"}]`,
-		enc:  "0000000000000000000000000100000000000000000000000000000000000000",
-		want: common.Address{1},
-	},
-	// Bytes
-	{
-		def:  `[{"type": "bytes32"}]`,
-		enc:  "0100000000000000000000000000000000000000000000000000000000000000",
-		want: [32]byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
-	},
 	{
 		def: `[{"type": "bytes"}]`,
 		enc: "000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200100000000000000000000000000000000000000000000000000000000000000",
-		want: common.Hex2Bytes("0100000000000000000000000000000000000000000000000000000000000000"),
+		want: [32]byte{1},
-	},
-	{
-		def:  `[{"type": "bytes"}]`,
-		enc:  "000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200100000000000000000000000000000000000000000000000000000000000000",
-		want: [32]byte{},
-		err:  "abi: cannot unmarshal []uint8 in to [32]uint8",
 	},
 	{
 		def: `[{"type": "bytes32"}]`,
@@ -153,245 +126,13 @@ var unpackTests = []unpackTest{
 		want: []byte(nil),
 		err:  "abi: cannot unmarshal [32]uint8 in to []uint8",
 	},
-	{
-		def:  `[{"type": "bytes32"}]`,
-		enc:  "0100000000000000000000000000000000000000000000000000000000000000",
-		want: [32]byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
-	},
-	// Functions
-	{
-		def:  `[{"type": "function"}]`,
-		enc:  "0100000000000000000000000000000000000000000000000000000000000000",
-		want: [24]byte{1},
-	},
-	// Slice and Array
-	{
-		def:  `[{"type": "uint8[]"}]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
-		want: []uint8{1, 2},
-	},
-	{
-		def:  `[{"type": "uint8[]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000",
-		want: []uint8{},
-	},
-	{
-		def:  `[{"type": "uint256[]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000",
-		want: []*big.Int{},
-	},
-	{
-		def:  `[{"type": "uint8[2]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
-		want: [2]uint8{1, 2},
-	},
-	// multi dimensional, if these pass, all types that don't require length prefix should pass
-	{
-		def:  `[{"type": "uint8[][]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000",
-		want: [][]uint8{},
-	},
-	{
-		def:  `[{"type": "uint8[][]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
-		want: [][]uint8{{1, 2}, {1, 2}},
-	},
-	{
-		def:  `[{"type": "uint8[][]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003",
-		want: [][]uint8{{1, 2}, {1, 2, 3}},
-	},
-	{
-		def:  `[{"type": "uint8[2][2]"}]`,
-		enc:  "0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
-		want: [2][2]uint8{{1, 2}, {1, 2}},
-	},
-	{
-		def:  `[{"type": "uint8[][2]"}]`,
-		enc:  "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
want: [2][]uint8{{}, {}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint8[][2]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001",
|
|
||||||
want: [2][]uint8{{1}, {1}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint8[2][]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000",
|
|
||||||
want: [][2]uint8{},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint8[2][]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [][2]uint8{{1, 2}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint8[2][]"}]`,
|
|
||||||
enc: "000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [][2]uint8{{1, 2}, {1, 2}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint16[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []uint16{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint16[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]uint16{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint32[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []uint32{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint32[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]uint32{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint32[2][3][4]"}]`,
|
|
||||||
enc: "000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000d000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000f000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001300000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000015000000000000000000000000000000000000000000000000000000000000001600000000000000000000000000000000000000000000000000000000000000170000000000000000000000000000000000000000000000000000000000000018",
|
|
||||||
want: [4][3][2]uint32{{{1, 2}, {3, 4}, {5, 6}}, {{7, 8}, {9, 10}, {11, 12}}, {{13, 14}, {15, 16}, {17, 18}}, {{19, 20}, {21, 22}, {23, 24}}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint64[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []uint64{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint64[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]uint64{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint256[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []*big.Int{big.NewInt(1), big.NewInt(2)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint256[3]"}]`,
|
|
||||||
enc: "000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003",
|
|
||||||
want: [3]*big.Int{big.NewInt(1), big.NewInt(2), big.NewInt(3)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "string[4]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000000548656c6c6f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005576f726c64000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b476f2d657468657265756d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008457468657265756d000000000000000000000000000000000000000000000000",
|
|
||||||
want: [4]string{"Hello", "World", "Go-ethereum", "Ethereum"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "string[]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000008457468657265756d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b676f2d657468657265756d000000000000000000000000000000000000000000",
|
|
||||||
want: []string{"Ethereum", "go-ethereum"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "bytes[]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000003f0f0f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003f0f0f00000000000000000000000000000000000000000000000000000000000",
|
|
||||||
want: [][]byte{{0xf0, 0xf0, 0xf0}, {0xf0, 0xf0, 0xf0}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "uint256[2][][]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000c8000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000003e80000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000c8000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000003e8",
|
|
||||||
want: [][][2]*big.Int{{{big.NewInt(1), big.NewInt(200)}, {big.NewInt(1), big.NewInt(1000)}}, {{big.NewInt(1), big.NewInt(200)}, {big.NewInt(1), big.NewInt(1000)}}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int8[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []int8{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int8[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]int8{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int16[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []int16{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int16[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]int16{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int32[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []int32{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int32[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]int32{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int64[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []int64{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int64[2]"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: [2]int64{1, 2},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int256[]"}]`,
|
|
||||||
enc: "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: []*big.Int{big.NewInt(1), big.NewInt(2)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"type": "int256[3]"}]`,
|
|
||||||
enc: "000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000003",
|
|
||||||
want: [3]*big.Int{big.NewInt(1), big.NewInt(2), big.NewInt(3)},
|
|
||||||
},
|
|
||||||
// struct outputs
|
|
||||||
{
|
|
||||||
def: `[{"name":"int1","type":"int256"},{"name":"int2","type":"int256"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: struct {
|
|
||||||
Int1 *big.Int
|
|
||||||
Int2 *big.Int
|
|
||||||
}{big.NewInt(1), big.NewInt(2)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"name":"int_one","type":"int256"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: struct {
|
|
||||||
IntOne *big.Int
|
|
||||||
}{big.NewInt(1)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"name":"int__one","type":"int256"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: struct {
|
|
||||||
IntOne *big.Int
|
|
||||||
}{big.NewInt(1)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"name":"int_one_","type":"int256"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: struct {
|
|
||||||
IntOne *big.Int
|
|
||||||
}{big.NewInt(1)},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
def: `[{"name":"int_one","type":"int256"}, {"name":"intone","type":"int256"}]`,
|
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
|
||||||
want: struct {
|
|
||||||
IntOne *big.Int
|
|
||||||
Intone *big.Int
|
|
||||||
}{big.NewInt(1), big.NewInt(2)},
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
def: `[{"name":"___","type":"int256"}]`,
|
def: `[{"name":"___","type":"int256"}]`,
|
||||||
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
||||||
want: struct {
|
want: struct {
|
||||||
IntOne *big.Int
|
IntOne *big.Int
|
||||||
Intone *big.Int
|
Intone *big.Int
|
||||||
}{},
|
}{IntOne: big.NewInt(1)},
|
||||||
err: "abi: purely underscored output cannot unpack to struct",
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
def: `[{"name":"int_one","type":"int256"},{"name":"IntOne","type":"int256"}]`,
|
def: `[{"name":"int_one","type":"int256"},{"name":"IntOne","type":"int256"}]`,
|
||||||
@ -438,12 +179,37 @@ var unpackTests = []unpackTest{
|
|||||||
}{},
|
}{},
|
||||||
err: "abi: purely underscored output cannot unpack to struct",
|
err: "abi: purely underscored output cannot unpack to struct",
|
||||||
},
|
},
|
||||||
|
// Make sure only the first argument is consumed
|
||||||
|
{
|
||||||
|
def: `[{"name":"int_one","type":"int256"}]`,
|
||||||
|
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
want: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
}{big.NewInt(1)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"name":"int__one","type":"int256"}]`,
|
||||||
|
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
want: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
}{big.NewInt(1)},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
def: `[{"name":"int_one_","type":"int256"}]`,
|
||||||
|
enc: "00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002",
|
||||||
|
want: struct {
|
||||||
|
IntOne *big.Int
|
||||||
|
}{big.NewInt(1)},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
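The cases added above pin down two behaviours of struct unpacking: underscored output names such as `int_one`, `int__one` and `int_one_` all camel-case to the field `IntOne`, and only the first encoded argument is consumed when the struct has a single field. A minimal standalone sketch of that mapping, assuming the post-rename `UnpackIntoInterface` API (the method name `getter` is hypothetical):

```go
package main

import (
	"fmt"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

func main() {
	// Single-output method definition, mirroring the test cases above.
	def := `[{"name":"getter","type":"function","outputs":[{"name":"int_one","type":"int256"}]}]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		panic(err)
	}
	// Two encoded words; only the first should end up in the single struct field.
	data := common.Hex2Bytes(
		"0000000000000000000000000000000000000000000000000000000000000001" +
			"0000000000000000000000000000000000000000000000000000000000000002")

	out := struct {
		IntOne *big.Int // "int_one" maps to the field IntOne
	}{}
	if err := parsed.UnpackIntoInterface(&out, "getter", data); err != nil {
		panic(err)
	}
	fmt.Println(out.IntOne) // 1
}
```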
func TestUnpack(t *testing.T) {
|
// TestLocalUnpackTests runs test specially designed only for unpacking.
|
||||||
|
// All test cases that can be used to test packing and unpacking should move to packing_test.go
|
||||||
|
func TestLocalUnpackTests(t *testing.T) {
|
||||||
for i, test := range unpackTests {
|
for i, test := range unpackTests {
|
||||||
t.Run(strconv.Itoa(i), func(t *testing.T) {
|
t.Run(strconv.Itoa(i), func(t *testing.T) {
|
||||||
def := fmt.Sprintf(`[{ "name" : "method", "outputs": %s}]`, test.def)
|
//Unpack
|
||||||
|
def := fmt.Sprintf(`[{ "name" : "method", "type": "function", "outputs": %s}]`, test.def)
|
||||||
abi, err := JSON(strings.NewReader(def))
|
abi, err := JSON(strings.NewReader(def))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("invalid ABI definition %s: %v", def, err)
|
t.Fatalf("invalid ABI definition %s: %v", def, err)
|
||||||
@ -453,7 +219,7 @@ func TestUnpack(t *testing.T) {
|
|||||||
t.Fatalf("invalid hex %s: %v", test.enc, err)
|
t.Fatalf("invalid hex %s: %v", test.enc, err)
|
||||||
}
|
}
|
||||||
outptr := reflect.New(reflect.TypeOf(test.want))
|
outptr := reflect.New(reflect.TypeOf(test.want))
|
||||||
err = abi.Unpack(outptr.Interface(), "method", encb)
|
err = abi.UnpackIntoInterface(outptr.Interface(), "method", encb)
|
||||||
if err := test.checkError(err); err != nil {
|
if err := test.checkError(err); err != nil {
|
||||||
t.Errorf("test %d (%v) failed: %v", i, test.def, err)
|
t.Errorf("test %d (%v) failed: %v", i, test.def, err)
|
||||||
return
|
return
|
||||||
@ -466,7 +232,7 @@ func TestUnpack(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
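Throughout this file the old reflection-based `Unpack` calls become `UnpackIntoInterface`. For context, a minimal sketch of the two call styles after the rename, assuming the newer API in which `Unpack` itself returns the decoded values as a `[]interface{}`:

```go
package main

import (
	"fmt"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

func main() {
	def := `[{"name":"method","type":"function","outputs":[{"type":"uint256"}]}]`
	parsed, err := abi.JSON(strings.NewReader(def))
	if err != nil {
		panic(err)
	}
	encb := common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000002a")

	// Value-returning form.
	vals, err := parsed.Unpack("method", encb)
	fmt.Println(vals[0], err) // 42 <nil>

	// Reflection-based form, filling a caller-supplied pointer (the renamed helper).
	var out *big.Int
	err = parsed.UnpackIntoInterface(&out, "method", encb)
	fmt.Println(out, err) // 42 <nil>
}
```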
func TestUnpackSetDynamicArrayOutput(t *testing.T) {
|
func TestUnpackIntoInterfaceSetDynamicArrayOutput(t *testing.T) {
|
||||||
abi, err := JSON(strings.NewReader(`[{"constant":true,"inputs":[],"name":"testDynamicFixedBytes15","outputs":[{"name":"","type":"bytes15[]"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"testDynamicFixedBytes32","outputs":[{"name":"","type":"bytes32[]"}],"payable":false,"stateMutability":"view","type":"function"}]`))
|
abi, err := JSON(strings.NewReader(`[{"constant":true,"inputs":[],"name":"testDynamicFixedBytes15","outputs":[{"name":"","type":"bytes15[]"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"testDynamicFixedBytes32","outputs":[{"name":"","type":"bytes32[]"}],"payable":false,"stateMutability":"view","type":"function"}]`))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -481,7 +247,7 @@ func TestUnpackSetDynamicArrayOutput(t *testing.T) {
|
|||||||
)
|
)
|
||||||
|
|
||||||
// test 32
|
// test 32
|
||||||
err = abi.Unpack(&out32, "testDynamicFixedBytes32", marshalledReturn32)
|
err = abi.UnpackIntoInterface(&out32, "testDynamicFixedBytes32", marshalledReturn32)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
@ -498,7 +264,7 @@ func TestUnpackSetDynamicArrayOutput(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// test 15
|
// test 15
|
||||||
err = abi.Unpack(&out15, "testDynamicFixedBytes32", marshalledReturn15)
|
err = abi.UnpackIntoInterface(&out15, "testDynamicFixedBytes32", marshalledReturn15)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
@ -522,7 +288,7 @@ type methodMultiOutput struct {
|
|||||||
|
|
||||||
func methodMultiReturn(require *require.Assertions) (ABI, []byte, methodMultiOutput) {
|
func methodMultiReturn(require *require.Assertions) (ABI, []byte, methodMultiOutput) {
|
||||||
const definition = `[
|
const definition = `[
|
||||||
{ "name" : "multi", "constant" : false, "outputs": [ { "name": "Int", "type": "uint256" }, { "name": "String", "type": "string" } ] }]`
|
{ "name" : "multi", "type": "function", "outputs": [ { "name": "Int", "type": "uint256" }, { "name": "String", "type": "string" } ] }]`
|
||||||
var expected = methodMultiOutput{big.NewInt(1), "hello"}
|
var expected = methodMultiOutput{big.NewInt(1), "hello"}
|
||||||
|
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
@ -592,14 +358,14 @@ func TestMethodMultiReturn(t *testing.T) {
|
|||||||
}, {
|
}, {
|
||||||
&[]interface{}{new(int)},
|
&[]interface{}{new(int)},
|
||||||
&[]interface{}{},
|
&[]interface{}{},
|
||||||
"abi: insufficient number of elements in the list/array for unpack, want 2, got 1",
|
"abi: insufficient number of arguments for unpack, want 2, got 1",
|
||||||
"Can not unpack into a slice with wrong types",
|
"Can not unpack into a slice with wrong types",
|
||||||
}}
|
}}
|
||||||
for _, tc := range testCases {
|
for _, tc := range testCases {
|
||||||
tc := tc
|
tc := tc
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
require := require.New(t)
|
require := require.New(t)
|
||||||
err := abi.Unpack(tc.dest, "multi", data)
|
err := abi.UnpackIntoInterface(tc.dest, "multi", data)
|
||||||
if tc.error == "" {
|
if tc.error == "" {
|
||||||
require.Nil(err, "Should be able to unpack method outputs.")
|
require.Nil(err, "Should be able to unpack method outputs.")
|
||||||
require.Equal(tc.expected, tc.dest)
|
require.Equal(tc.expected, tc.dest)
|
||||||
@ -611,7 +377,7 @@ func TestMethodMultiReturn(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestMultiReturnWithArray(t *testing.T) {
|
func TestMultiReturnWithArray(t *testing.T) {
|
||||||
const definition = `[{"name" : "multi", "outputs": [{"type": "uint64[3]"}, {"type": "uint64"}]}]`
|
const definition = `[{"name" : "multi", "type": "function", "outputs": [{"type": "uint64[3]"}, {"type": "uint64"}]}]`
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -622,7 +388,7 @@ func TestMultiReturnWithArray(t *testing.T) {
|
|||||||
|
|
||||||
ret1, ret1Exp := new([3]uint64), [3]uint64{9, 9, 9}
|
ret1, ret1Exp := new([3]uint64), [3]uint64{9, 9, 9}
|
||||||
ret2, ret2Exp := new(uint64), uint64(8)
|
ret2, ret2Exp := new(uint64), uint64(8)
|
||||||
if err := abi.Unpack(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
|
if err := abi.UnpackIntoInterface(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
||||||
@ -634,7 +400,7 @@ func TestMultiReturnWithArray(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestMultiReturnWithStringArray(t *testing.T) {
|
func TestMultiReturnWithStringArray(t *testing.T) {
|
||||||
const definition = `[{"name" : "multi", "outputs": [{"name": "","type": "uint256[3]"},{"name": "","type": "address"},{"name": "","type": "string[2]"},{"name": "","type": "bool"}]}]`
|
const definition = `[{"name" : "multi", "type": "function", "outputs": [{"name": "","type": "uint256[3]"},{"name": "","type": "address"},{"name": "","type": "string[2]"},{"name": "","type": "bool"}]}]`
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -646,7 +412,7 @@ func TestMultiReturnWithStringArray(t *testing.T) {
|
|||||||
ret2, ret2Exp := new(common.Address), common.HexToAddress("ab1257528b3782fb40d7ed5f72e624b744dffb2f")
|
ret2, ret2Exp := new(common.Address), common.HexToAddress("ab1257528b3782fb40d7ed5f72e624b744dffb2f")
|
||||||
ret3, ret3Exp := new([2]string), [2]string{"Ethereum", "Hello, Ethereum!"}
|
ret3, ret3Exp := new([2]string), [2]string{"Ethereum", "Hello, Ethereum!"}
|
||||||
ret4, ret4Exp := new(bool), false
|
ret4, ret4Exp := new(bool), false
|
||||||
if err := abi.Unpack(&[]interface{}{ret1, ret2, ret3, ret4}, "multi", buff.Bytes()); err != nil {
|
if err := abi.UnpackIntoInterface(&[]interface{}{ret1, ret2, ret3, ret4}, "multi", buff.Bytes()); err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
||||||
@ -664,7 +430,7 @@ func TestMultiReturnWithStringArray(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestMultiReturnWithStringSlice(t *testing.T) {
|
func TestMultiReturnWithStringSlice(t *testing.T) {
|
||||||
const definition = `[{"name" : "multi", "outputs": [{"name": "","type": "string[]"},{"name": "","type": "uint256[]"}]}]`
|
const definition = `[{"name" : "multi", "type": "function", "outputs": [{"name": "","type": "string[]"},{"name": "","type": "uint256[]"}]}]`
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -684,7 +450,7 @@ func TestMultiReturnWithStringSlice(t *testing.T) {
|
|||||||
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000065")) // output[1][1] value
|
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000065")) // output[1][1] value
|
||||||
ret1, ret1Exp := new([]string), []string{"ethereum", "go-ethereum"}
|
ret1, ret1Exp := new([]string), []string{"ethereum", "go-ethereum"}
|
||||||
ret2, ret2Exp := new([]*big.Int), []*big.Int{big.NewInt(100), big.NewInt(101)}
|
ret2, ret2Exp := new([]*big.Int), []*big.Int{big.NewInt(100), big.NewInt(101)}
|
||||||
if err := abi.Unpack(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
|
if err := abi.UnpackIntoInterface(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
||||||
@ -700,7 +466,7 @@ func TestMultiReturnWithDeeplyNestedArray(t *testing.T) {
|
|||||||
// values of nested static arrays count towards the size as well, and any element
|
// values of nested static arrays count towards the size as well, and any element
|
||||||
// following such a nested array argument should be read with the correct offset,
|
// following such a nested array argument should be read with the correct offset,
|
||||||
// so that it does not read content from the previous array argument.
|
// so that it does not read content from the previous array argument.
|
||||||
const definition = `[{"name" : "multi", "outputs": [{"type": "uint64[3][2][4]"}, {"type": "uint64"}]}]`
|
const definition = `[{"name" : "multi", "type": "function", "outputs": [{"type": "uint64[3][2][4]"}, {"type": "uint64"}]}]`
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -724,7 +490,7 @@ func TestMultiReturnWithDeeplyNestedArray(t *testing.T) {
|
|||||||
{{0x411, 0x412, 0x413}, {0x421, 0x422, 0x423}},
|
{{0x411, 0x412, 0x413}, {0x421, 0x422, 0x423}},
|
||||||
}
|
}
|
||||||
ret2, ret2Exp := new(uint64), uint64(0x9876)
|
ret2, ret2Exp := new(uint64), uint64(0x9876)
|
||||||
if err := abi.Unpack(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
|
if err := abi.UnpackIntoInterface(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
if !reflect.DeepEqual(*ret1, ret1Exp) {
|
||||||
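The comment at the top of this hunk is the point of the test: a nested static array consumes one 32-byte head slot per leaf element, so the argument that follows it must be read at the corresponding offset. A quick worked check for `uint64[3][2][4]` (plain arithmetic, no geth APIs involved):

```go
package main

import "fmt"

func main() {
	// uint64[3][2][4] is fully static: 4 * 2 * 3 leaf elements, one 32-byte word each.
	slots := 4 * 2 * 3
	fmt.Println(slots, slots*32) // 24 slots, so the trailing uint64 starts at byte offset 768
}
```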
@ -737,15 +503,15 @@ func TestMultiReturnWithDeeplyNestedArray(t *testing.T) {
|
|||||||
|
|
||||||
func TestUnmarshal(t *testing.T) {
|
func TestUnmarshal(t *testing.T) {
|
||||||
const definition = `[
|
const definition = `[
|
||||||
{ "name" : "int", "constant" : false, "outputs": [ { "type": "uint256" } ] },
|
{ "name" : "int", "type": "function", "outputs": [ { "type": "uint256" } ] },
|
||||||
{ "name" : "bool", "constant" : false, "outputs": [ { "type": "bool" } ] },
|
{ "name" : "bool", "type": "function", "outputs": [ { "type": "bool" } ] },
|
||||||
{ "name" : "bytes", "constant" : false, "outputs": [ { "type": "bytes" } ] },
|
{ "name" : "bytes", "type": "function", "outputs": [ { "type": "bytes" } ] },
|
||||||
{ "name" : "fixed", "constant" : false, "outputs": [ { "type": "bytes32" } ] },
|
{ "name" : "fixed", "type": "function", "outputs": [ { "type": "bytes32" } ] },
|
||||||
{ "name" : "multi", "constant" : false, "outputs": [ { "type": "bytes" }, { "type": "bytes" } ] },
|
{ "name" : "multi", "type": "function", "outputs": [ { "type": "bytes" }, { "type": "bytes" } ] },
|
||||||
{ "name" : "intArraySingle", "constant" : false, "outputs": [ { "type": "uint256[3]" } ] },
|
{ "name" : "intArraySingle", "type": "function", "outputs": [ { "type": "uint256[3]" } ] },
|
||||||
{ "name" : "addressSliceSingle", "constant" : false, "outputs": [ { "type": "address[]" } ] },
|
{ "name" : "addressSliceSingle", "type": "function", "outputs": [ { "type": "address[]" } ] },
|
||||||
{ "name" : "addressSliceDouble", "constant" : false, "outputs": [ { "name": "a", "type": "address[]" }, { "name": "b", "type": "address[]" } ] },
|
{ "name" : "addressSliceDouble", "type": "function", "outputs": [ { "name": "a", "type": "address[]" }, { "name": "b", "type": "address[]" } ] },
|
||||||
{ "name" : "mixedBytes", "constant" : true, "outputs": [ { "name": "a", "type": "bytes" }, { "name": "b", "type": "bytes32" } ] }]`
|
{ "name" : "mixedBytes", "type": "function", "stateMutability" : "view", "outputs": [ { "name": "a", "type": "bytes" }, { "name": "b", "type": "bytes32" } ] }]`
|
||||||
|
|
||||||
abi, err := JSON(strings.NewReader(definition))
|
abi, err := JSON(strings.NewReader(definition))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -763,7 +529,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Write(common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000a"))
|
buff.Write(common.Hex2Bytes("000000000000000000000000000000000000000000000000000000000000000a"))
|
||||||
buff.Write(common.Hex2Bytes("0102000000000000000000000000000000000000000000000000000000000000"))
|
buff.Write(common.Hex2Bytes("0102000000000000000000000000000000000000000000000000000000000000"))
|
||||||
|
|
||||||
err = abi.Unpack(&mixedBytes, "mixedBytes", buff.Bytes())
|
err = abi.UnpackIntoInterface(&mixedBytes, "mixedBytes", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
} else {
|
} else {
|
||||||
@ -778,7 +544,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
|
|
||||||
// marshal int
|
// marshal int
|
||||||
var Int *big.Int
|
var Int *big.Int
|
||||||
err = abi.Unpack(&Int, "int", common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000001"))
|
err = abi.UnpackIntoInterface(&Int, "int", common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000001"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -789,7 +555,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
|
|
||||||
// marshal bool
|
// marshal bool
|
||||||
var Bool bool
|
var Bool bool
|
||||||
err = abi.Unpack(&Bool, "bool", common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000001"))
|
err = abi.UnpackIntoInterface(&Bool, "bool", common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000001"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -806,7 +572,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Write(bytesOut)
|
buff.Write(bytesOut)
|
||||||
|
|
||||||
var Bytes []byte
|
var Bytes []byte
|
||||||
err = abi.Unpack(&Bytes, "bytes", buff.Bytes())
|
err = abi.UnpackIntoInterface(&Bytes, "bytes", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -822,7 +588,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
bytesOut = common.RightPadBytes([]byte("hello"), 64)
|
bytesOut = common.RightPadBytes([]byte("hello"), 64)
|
||||||
buff.Write(bytesOut)
|
buff.Write(bytesOut)
|
||||||
|
|
||||||
err = abi.Unpack(&Bytes, "bytes", buff.Bytes())
|
err = abi.UnpackIntoInterface(&Bytes, "bytes", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -838,7 +604,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
bytesOut = common.RightPadBytes([]byte("hello"), 64)
|
bytesOut = common.RightPadBytes([]byte("hello"), 64)
|
||||||
buff.Write(bytesOut)
|
buff.Write(bytesOut)
|
||||||
|
|
||||||
err = abi.Unpack(&Bytes, "bytes", buff.Bytes())
|
err = abi.UnpackIntoInterface(&Bytes, "bytes", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -848,7 +614,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// marshal dynamic bytes output empty
|
// marshal dynamic bytes output empty
|
||||||
err = abi.Unpack(&Bytes, "bytes", nil)
|
err = abi.UnpackIntoInterface(&Bytes, "bytes", nil)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Error("expected error")
|
t.Error("expected error")
|
||||||
}
|
}
|
||||||
@ -859,7 +625,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000005"))
|
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000005"))
|
||||||
buff.Write(common.RightPadBytes([]byte("hello"), 32))
|
buff.Write(common.RightPadBytes([]byte("hello"), 32))
|
||||||
|
|
||||||
err = abi.Unpack(&Bytes, "bytes", buff.Bytes())
|
err = abi.UnpackIntoInterface(&Bytes, "bytes", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -873,7 +639,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Write(common.RightPadBytes([]byte("hello"), 32))
|
buff.Write(common.RightPadBytes([]byte("hello"), 32))
|
||||||
|
|
||||||
var hash common.Hash
|
var hash common.Hash
|
||||||
err = abi.Unpack(&hash, "fixed", buff.Bytes())
|
err = abi.UnpackIntoInterface(&hash, "fixed", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -886,12 +652,12 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
// marshal error
|
// marshal error
|
||||||
buff.Reset()
|
buff.Reset()
|
||||||
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000020"))
|
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000020"))
|
||||||
err = abi.Unpack(&Bytes, "bytes", buff.Bytes())
|
err = abi.UnpackIntoInterface(&Bytes, "bytes", buff.Bytes())
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Error("expected error")
|
t.Error("expected error")
|
||||||
}
|
}
|
||||||
|
|
||||||
err = abi.Unpack(&Bytes, "multi", make([]byte, 64))
|
err = abi.UnpackIntoInterface(&Bytes, "multi", make([]byte, 64))
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Error("expected error")
|
t.Error("expected error")
|
||||||
}
|
}
|
||||||
@ -902,7 +668,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000003"))
|
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000003"))
|
||||||
// marshal int array
|
// marshal int array
|
||||||
var intArray [3]*big.Int
|
var intArray [3]*big.Int
|
||||||
err = abi.Unpack(&intArray, "intArraySingle", buff.Bytes())
|
err = abi.UnpackIntoInterface(&intArray, "intArraySingle", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -923,7 +689,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Write(common.Hex2Bytes("0000000000000000000000000100000000000000000000000000000000000000"))
|
buff.Write(common.Hex2Bytes("0000000000000000000000000100000000000000000000000000000000000000"))
|
||||||
|
|
||||||
var outAddr []common.Address
|
var outAddr []common.Address
|
||||||
err = abi.Unpack(&outAddr, "addressSliceSingle", buff.Bytes())
|
err = abi.UnpackIntoInterface(&outAddr, "addressSliceSingle", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal("didn't expect error:", err)
|
t.Fatal("didn't expect error:", err)
|
||||||
}
|
}
|
||||||
@ -950,7 +716,7 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
A []common.Address
|
A []common.Address
|
||||||
B []common.Address
|
B []common.Address
|
||||||
}
|
}
|
||||||
err = abi.Unpack(&outAddrStruct, "addressSliceDouble", buff.Bytes())
|
err = abi.UnpackIntoInterface(&outAddrStruct, "addressSliceDouble", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal("didn't expect error:", err)
|
t.Fatal("didn't expect error:", err)
|
||||||
}
|
}
|
||||||
@ -978,14 +744,14 @@ func TestUnmarshal(t *testing.T) {
|
|||||||
buff.Reset()
|
buff.Reset()
|
||||||
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000100"))
|
buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000100"))
|
||||||
|
|
||||||
err = abi.Unpack(&outAddr, "addressSliceSingle", buff.Bytes())
|
err = abi.UnpackIntoInterface(&outAddr, "addressSliceSingle", buff.Bytes())
|
||||||
if err == nil {
|
if err == nil {
|
||||||
t.Fatal("expected error:", err)
|
t.Fatal("expected error:", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestUnpackTuple(t *testing.T) {
|
func TestUnpackTuple(t *testing.T) {
|
||||||
const simpleTuple = `[{"name":"tuple","constant":false,"outputs":[{"type":"tuple","name":"ret","components":[{"type":"int256","name":"a"},{"type":"int256","name":"b"}]}]}]`
|
const simpleTuple = `[{"name":"tuple","type":"function","outputs":[{"type":"tuple","name":"ret","components":[{"type":"int256","name":"a"},{"type":"int256","name":"b"}]}]}]`
|
||||||
abi, err := JSON(strings.NewReader(simpleTuple))
|
abi, err := JSON(strings.NewReader(simpleTuple))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -996,25 +762,29 @@ func TestUnpackTuple(t *testing.T) {
|
|||||||
buff.Write(common.Hex2Bytes("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")) // ret[b] = -1
|
buff.Write(common.Hex2Bytes("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")) // ret[b] = -1
|
||||||
|
|
||||||
// If the result is single tuple, use struct as return value container directly.
|
// If the result is single tuple, use struct as return value container directly.
|
||||||
v := struct {
|
type v struct {
|
||||||
A *big.Int
|
A *big.Int
|
||||||
B *big.Int
|
B *big.Int
|
||||||
}{new(big.Int), new(big.Int)}
|
}
|
||||||
|
type r struct {
|
||||||
|
Result v
|
||||||
|
}
|
||||||
|
var ret0 = new(r)
|
||||||
|
err = abi.UnpackIntoInterface(ret0, "tuple", buff.Bytes())
|
||||||
|
|
||||||
err = abi.Unpack(&v, "tuple", buff.Bytes())
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
} else {
|
} else {
|
||||||
if v.A.Cmp(big.NewInt(1)) != 0 {
|
if ret0.Result.A.Cmp(big.NewInt(1)) != 0 {
|
||||||
t.Errorf("unexpected value unpacked: want %x, got %x", 1, v.A)
|
t.Errorf("unexpected value unpacked: want %x, got %x", 1, ret0.Result.A)
|
||||||
}
|
}
|
||||||
if v.B.Cmp(big.NewInt(-1)) != 0 {
|
if ret0.Result.B.Cmp(big.NewInt(-1)) != 0 {
|
||||||
t.Errorf("unexpected value unpacked: want %x, got %x", v.B, -1)
|
t.Errorf("unexpected value unpacked: want %x, got %x", -1, ret0.Result.B)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Test nested tuple
|
// Test nested tuple
|
||||||
const nestedTuple = `[{"name":"tuple","constant":false,"outputs":[
|
const nestedTuple = `[{"name":"tuple","type":"function","outputs":[
|
||||||
{"type":"tuple","name":"s","components":[{"type":"uint256","name":"a"},{"type":"uint256[]","name":"b"},{"type":"tuple[]","name":"c","components":[{"name":"x", "type":"uint256"},{"name":"y","type":"uint256"}]}]},
|
{"type":"tuple","name":"s","components":[{"type":"uint256","name":"a"},{"type":"uint256[]","name":"b"},{"type":"tuple[]","name":"c","components":[{"name":"x", "type":"uint256"},{"name":"y","type":"uint256"}]}]},
|
||||||
{"type":"tuple","name":"t","components":[{"name":"x", "type":"uint256"},{"name":"y","type":"uint256"}]},
|
{"type":"tuple","name":"t","components":[{"name":"x", "type":"uint256"},{"name":"y","type":"uint256"}]},
|
||||||
{"type":"uint256","name":"a"}
|
{"type":"uint256","name":"a"}
|
||||||
@ -1073,7 +843,7 @@ func TestUnpackTuple(t *testing.T) {
|
|||||||
A: big.NewInt(1),
|
A: big.NewInt(1),
|
||||||
}
|
}
|
||||||
|
|
||||||
err = abi.Unpack(&ret, "tuple", buff.Bytes())
|
err = abi.UnpackIntoInterface(&ret, "tuple", buff.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
@ -1136,7 +906,7 @@ func TestOOMMaliciousInput(t *testing.T) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
for i, test := range oomTests {
|
for i, test := range oomTests {
|
||||||
def := fmt.Sprintf(`[{ "name" : "method", "outputs": %s}]`, test.def)
|
def := fmt.Sprintf(`[{ "name" : "method", "type": "function", "outputs": %s}]`, test.def)
|
||||||
abi, err := JSON(strings.NewReader(def))
|
abi, err := JSON(strings.NewReader(def))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("invalid ABI definition %s: %v", def, err)
|
t.Fatalf("invalid ABI definition %s: %v", def, err)
|
||||||
|
@ -21,7 +21,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"math/big"
|
"math/big"
|
||||||
|
|
||||||
ethereum "github.com/ethereum/go-ethereum"
|
"github.com/ethereum/go-ethereum"
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
"github.com/ethereum/go-ethereum/event"
|
"github.com/ethereum/go-ethereum/event"
|
||||||
@ -88,7 +88,7 @@ type Wallet interface {
|
|||||||
// to discover non zero accounts and automatically add them to list of tracked
|
// to discover non zero accounts and automatically add them to list of tracked
|
||||||
// accounts.
|
// accounts.
|
||||||
//
|
//
|
||||||
// Note, self derivaton will increment the last component of the specified path
|
// Note, self derivation will increment the last component of the specified path
|
||||||
// opposed to descending into a child path to allow discovering accounts starting
|
// opposed to descending into a child path to allow discovering accounts starting
|
||||||
// from non zero components.
|
// from non zero components.
|
||||||
//
|
//
|
||||||
@ -113,7 +113,7 @@ type Wallet interface {
|
|||||||
SignData(account Account, mimeType string, data []byte) ([]byte, error)
|
SignData(account Account, mimeType string, data []byte) ([]byte, error)
|
||||||
|
|
||||||
// SignDataWithPassphrase is identical to SignData, but also takes a password
|
// SignDataWithPassphrase is identical to SignData, but also takes a password
|
||||||
// NOTE: there's an chance that an erroneous call might mistake the two strings, and
|
// NOTE: there's a chance that an erroneous call might mistake the two strings, and
|
||||||
// supply password in the mimetype field, or vice versa. Thus, an implementation
|
// supply password in the mimetype field, or vice versa. Thus, an implementation
|
||||||
// should never echo the mimetype or return the mimetype in the error-response
|
// should never echo the mimetype or return the mimetype in the error-response
|
||||||
SignDataWithPassphrase(account Account, passphrase, mimeType string, data []byte) ([]byte, error)
|
SignDataWithPassphrase(account Account, passphrase, mimeType string, data []byte) ([]byte, error)
|
||||||
@ -127,8 +127,10 @@ type Wallet interface {
|
|||||||
// a password to decrypt the account, or a PIN code to verify the transaction),
|
// a password to decrypt the account, or a PIN code to verify the transaction),
|
||||||
// an AuthNeededError instance will be returned, containing infos for the user
|
// an AuthNeededError instance will be returned, containing infos for the user
|
||||||
// about which fields or actions are needed. The user may retry by providing
|
// about which fields or actions are needed. The user may retry by providing
|
||||||
// the needed details via SignHashWithPassphrase, or by other means (e.g. unlock
|
// the needed details via SignTextWithPassphrase, or by other means (e.g. unlock
|
||||||
// the account in a keystore).
|
// the account in a keystore).
|
||||||
|
//
|
||||||
|
// This method should return the signature in 'canonical' format, with v 0 or 1
|
||||||
SignText(account Account, text []byte) ([]byte, error)
|
SignText(account Account, text []byte) ([]byte, error)
|
||||||
|
|
||||||
// SignTextWithPassphrase is identical to Signtext, but also takes a password
|
// SignTextWithPassphrase is identical to Signtext, but also takes a password
|
||||||
accounts/external/backend.go (vendored, 72 lines changed)
@ -27,10 +27,9 @@ import (
|
|||||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
"github.com/ethereum/go-ethereum/event"
|
"github.com/ethereum/go-ethereum/event"
|
||||||
"github.com/ethereum/go-ethereum/internal/ethapi"
|
|
||||||
"github.com/ethereum/go-ethereum/log"
|
"github.com/ethereum/go-ethereum/log"
|
||||||
"github.com/ethereum/go-ethereum/rpc"
|
"github.com/ethereum/go-ethereum/rpc"
|
||||||
"github.com/ethereum/go-ethereum/signer/core"
|
"github.com/ethereum/go-ethereum/signer/core/apitypes"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ExternalBackend struct {
|
type ExternalBackend struct {
|
||||||
@ -131,6 +130,12 @@ func (api *ExternalSigner) Accounts() []accounts.Account {
|
|||||||
func (api *ExternalSigner) Contains(account accounts.Account) bool {
|
func (api *ExternalSigner) Contains(account accounts.Account) bool {
|
||||||
api.cacheMu.RLock()
|
api.cacheMu.RLock()
|
||||||
defer api.cacheMu.RUnlock()
|
defer api.cacheMu.RUnlock()
|
||||||
|
if api.cache == nil {
|
||||||
|
// If we haven't already fetched the accounts, it's time to do so now
|
||||||
|
api.cacheMu.RUnlock()
|
||||||
|
api.Accounts()
|
||||||
|
api.cacheMu.RLock()
|
||||||
|
}
|
||||||
for _, a := range api.cache {
|
for _, a := range api.cache {
|
||||||
if a.Address == account.Address && (account.URL == (accounts.URL{}) || account.URL == api.URL()) {
|
if a.Address == account.Address && (account.URL == (accounts.URL{}) || account.URL == api.URL()) {
|
||||||
return true
|
return true
|
||||||
@ -161,7 +166,7 @@ func (api *ExternalSigner) SignData(account accounts.Account, mimeType string, d
|
|||||||
hexutil.Encode(data)); err != nil {
|
hexutil.Encode(data)); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// If V is on 27/28-form, convert to to 0/1 for Clique
|
// If V is on 27/28-form, convert to 0/1 for Clique
|
||||||
if mimeType == accounts.MimetypeClique && (res[64] == 27 || res[64] == 28) {
|
if mimeType == accounts.MimetypeClique && (res[64] == 27 || res[64] == 28) {
|
||||||
res[64] -= 27 // Transform V from 27/28 to 0/1 for Clique use
|
res[64] -= 27 // Transform V from 27/28 to 0/1 for Clique use
|
||||||
}
|
}
|
||||||
@ -169,34 +174,71 @@ func (api *ExternalSigner) SignData(account accounts.Account, mimeType string, d
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (api *ExternalSigner) SignText(account accounts.Account, text []byte) ([]byte, error) {
|
func (api *ExternalSigner) SignText(account accounts.Account, text []byte) ([]byte, error) {
|
||||||
var res hexutil.Bytes
|
var signature hexutil.Bytes
|
||||||
var signAddress = common.NewMixedcaseAddress(account.Address)
|
var signAddress = common.NewMixedcaseAddress(account.Address)
|
||||||
if err := api.client.Call(&res, "account_signData",
|
if err := api.client.Call(&signature, "account_signData",
|
||||||
accounts.MimetypeTextPlain,
|
accounts.MimetypeTextPlain,
|
||||||
&signAddress, // Need to use the pointer here, because of how MarshalJSON is defined
|
&signAddress, // Need to use the pointer here, because of how MarshalJSON is defined
|
||||||
hexutil.Encode(text)); err != nil {
|
hexutil.Encode(text)); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return res, nil
|
if signature[64] == 27 || signature[64] == 28 {
|
||||||
|
// If clef is used as a backend, it may already have transformed
|
||||||
|
// the signature to ethereum-type signature.
|
||||||
|
signature[64] -= 27 // Transform V from Ethereum-legacy to 0/1
|
||||||
|
}
|
||||||
|
return signature, nil
|
||||||
}
|
}
|
||||||
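The added branch normalizes the recovery id: clef may hand back V as 27/28 (the Ethereum-legacy form), while callers of `SignText` expect 0/1 in `signature[64]`. A minimal sketch of why that matters, assuming go-ethereum's `accounts.TextHash` and `crypto` helpers (the function name is hypothetical):

```go
package external

import (
	"github.com/ethereum/go-ethereum/accounts"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/crypto"
)

// recoverTextSigner recovers the address behind a SignText result; it only works
// when signature[64] is 0 or 1, which is what the normalization above guarantees.
func recoverTextSigner(text, signature []byte) (common.Address, error) {
	hash := accounts.TextHash(text) // EIP-191 "personal sign" hash of the message
	pub, err := crypto.SigToPub(hash, signature)
	if err != nil {
		return common.Address{}, err
	}
	return crypto.PubkeyToAddress(*pub), nil
}
```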
|
|
||||||
|
// signTransactionResult represents the signing result returned by clef.
|
||||||
|
type signTransactionResult struct {
|
||||||
|
Raw hexutil.Bytes `json:"raw"`
|
||||||
|
Tx *types.Transaction `json:"tx"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SignTx sends the transaction to the external signer.
|
||||||
|
// If chainID is nil, or tx.ChainID is zero, the chain ID will be assigned
|
||||||
|
// by the external signer. For non-legacy transactions, the chain ID of the
|
||||||
|
// transaction overrides the chainID parameter.
|
||||||
func (api *ExternalSigner) SignTx(account accounts.Account, tx *types.Transaction, chainID *big.Int) (*types.Transaction, error) {
|
func (api *ExternalSigner) SignTx(account accounts.Account, tx *types.Transaction, chainID *big.Int) (*types.Transaction, error) {
|
||||||
res := ethapi.SignTransactionResult{}
|
|
||||||
data := hexutil.Bytes(tx.Data())
|
data := hexutil.Bytes(tx.Data())
|
||||||
var to *common.MixedcaseAddress
|
var to *common.MixedcaseAddress
|
||||||
if tx.To() != nil {
|
if tx.To() != nil {
|
||||||
t := common.NewMixedcaseAddress(*tx.To())
|
t := common.NewMixedcaseAddress(*tx.To())
|
||||||
to = &t
|
to = &t
|
||||||
}
|
}
|
||||||
args := &core.SendTxArgs{
|
args := &apitypes.SendTxArgs{
|
||||||
Data: &data,
|
Data: &data,
|
||||||
Nonce: hexutil.Uint64(tx.Nonce()),
|
Nonce: hexutil.Uint64(tx.Nonce()),
|
||||||
Value: hexutil.Big(*tx.Value()),
|
Value: hexutil.Big(*tx.Value()),
|
||||||
Gas: hexutil.Uint64(tx.Gas()),
|
Gas: hexutil.Uint64(tx.Gas()),
|
||||||
GasPrice: hexutil.Big(*tx.GasPrice()),
|
To: to,
|
||||||
To: to,
|
From: common.NewMixedcaseAddress(account.Address),
|
||||||
From: common.NewMixedcaseAddress(account.Address),
|
|
||||||
}
|
}
|
||||||
|
switch tx.Type() {
|
||||||
|
case types.LegacyTxType, types.AccessListTxType:
|
||||||
|
args.GasPrice = (*hexutil.Big)(tx.GasPrice())
|
||||||
|
case types.DynamicFeeTxType:
|
||||||
|
args.MaxFeePerGas = (*hexutil.Big)(tx.GasFeeCap())
|
||||||
|
args.MaxPriorityFeePerGas = (*hexutil.Big)(tx.GasTipCap())
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unsupported tx type %d", tx.Type())
|
||||||
|
}
|
||||||
|
// We should request the default chain id that we're operating with
|
||||||
|
// (the chain we're executing on)
|
||||||
|
if chainID != nil && chainID.Sign() != 0 {
|
||||||
|
args.ChainID = (*hexutil.Big)(chainID)
|
||||||
|
}
|
||||||
|
if tx.Type() != types.LegacyTxType {
|
||||||
|
// However, if the user asked for a particular chain id, then we should
|
||||||
|
// use that instead.
|
||||||
|
if tx.ChainId().Sign() != 0 {
|
||||||
|
args.ChainID = (*hexutil.Big)(tx.ChainId())
|
||||||
|
}
|
||||||
|
accessList := tx.AccessList()
|
||||||
|
args.AccessList = &accessList
|
||||||
|
}
|
||||||
|
var res signTransactionResult
|
||||||
if err := api.client.Call(&res, "account_signTransaction", args); err != nil {
|
if err := api.client.Call(&res, "account_signTransaction", args); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
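With the added switch, `SignTx` forwards either the legacy `GasPrice` or the EIP-1559 fee cap and tip to clef, and lets the transaction's own chain id override the `chainID` argument for non-legacy transactions. A caller-side sketch under those assumptions (the helper and its argument names are placeholders, not part of this diff):

```go
package external

import (
	"math/big"

	"github.com/ethereum/go-ethereum/accounts"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
)

// signDynamicFeeTx asks the external signer for a signature over an EIP-1559 tx.
func signDynamicFeeTx(signer *ExternalSigner, acc accounts.Account) (*types.Transaction, error) {
	to := common.HexToAddress("0x0000000000000000000000000000000000000001") // placeholder recipient
	tx := types.NewTx(&types.DynamicFeeTx{
		ChainID:   big.NewInt(1),
		Nonce:     0,
		GasTipCap: big.NewInt(2_000_000_000),  // 2 gwei tip
		GasFeeCap: big.NewInt(30_000_000_000), // 30 gwei fee cap
		Gas:       21000,
		To:        &to,
		Value:     big.NewInt(1),
	})
	// The chainID argument is only a fallback here: for a dynamic-fee tx the
	// transaction's own chain id is what ends up in args.ChainID.
	return signer.SignTx(acc, tx, big.NewInt(1))
}
```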
|
@@ -150,3 +150,31 @@ func (path *DerivationPath) UnmarshalJSON(b []byte) error {
 	*path, err = ParseDerivationPath(dp)
 	return err
 }
+
+// DefaultIterator creates a BIP-32 path iterator, which progresses by increasing the last component:
+// i.e. m/44'/60'/0'/0/0, m/44'/60'/0'/0/1, m/44'/60'/0'/0/2, ... m/44'/60'/0'/0/N.
+func DefaultIterator(base DerivationPath) func() DerivationPath {
+	path := make(DerivationPath, len(base))
+	copy(path[:], base[:])
+	// Set it back by one, so the first call gives the first result
+	path[len(path)-1]--
+	return func() DerivationPath {
+		path[len(path)-1]++
+		return path
+	}
+}
+
+// LedgerLiveIterator creates a bip44 path iterator for Ledger Live.
+// Ledger Live increments the third component rather than the fifth component
+// i.e. m/44'/60'/0'/0/0, m/44'/60'/1'/0/0, m/44'/60'/2'/0/0, ... m/44'/60'/N'/0/0.
+func LedgerLiveIterator(base DerivationPath) func() DerivationPath {
+	path := make(DerivationPath, len(base))
+	copy(path[:], base[:])
+	// Set it back by one, so the first call gives the first result
+	path[2]--
+	return func() DerivationPath {
+		// ledgerLivePathIterator iterates on the third component
+		path[2]++
+		return path
+	}
+}
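As a quick illustration of how the two iterators behave, here is a hedged usage sketch against the accounts package as it looks after this change. Note that each closure keeps mutating and returning the same underlying DerivationPath slice, so a caller that wants to retain a path should copy it first.

```go
package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/accounts"
)

func main() {
	// Standard BIP-44 iteration: the last component increases.
	next := accounts.DefaultIterator(accounts.DefaultBaseDerivationPath)
	for i := 0; i < 3; i++ {
		fmt.Println(next()) // m/44'/60'/0'/0/0, .../0/1, .../0/2
	}

	// Ledger Live iteration: the account (third) component increases instead.
	nextLive := accounts.LedgerLiveIterator(accounts.DefaultBaseDerivationPath)
	for i := 0; i < 3; i++ {
		fmt.Println(nextLive()) // m/44'/60'/0'/0/0, m/44'/60'/1'/0/0, m/44'/60'/2'/0/0
	}
}
```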
@@ -17,6 +17,7 @@
 package accounts
 
 import (
+	"fmt"
 	"reflect"
 	"testing"
 )

@@ -61,7 +62,7 @@ func TestHDPathParsing(t *testing.T) {
 		// Weird inputs just to ensure they work
 		{" m / 44 '\n/\n 60 \n\n\t' /\n0 ' /\t\t 0", DerivationPath{0x80000000 + 44, 0x80000000 + 60, 0x80000000 + 0, 0}},
 
-		// Invaid derivation paths
+		// Invalid derivation paths
 		{"", nil},              // Empty relative derivation path
 		{"m", nil},             // Empty absolute derivation path
 		{"m/", nil},            // Missing last derivation component
@@ -77,3 +78,41 @@ func TestHDPathParsing(t *testing.T) {
 		}
 	}
 }
+
+func testDerive(t *testing.T, next func() DerivationPath, expected []string) {
+	t.Helper()
+	for i, want := range expected {
+		if have := next(); fmt.Sprintf("%v", have) != want {
+			t.Errorf("step %d, have %v, want %v", i, have, want)
+		}
+	}
+}
+
+func TestHdPathIteration(t *testing.T) {
+	testDerive(t, DefaultIterator(DefaultBaseDerivationPath),
+		[]string{
+			"m/44'/60'/0'/0/0", "m/44'/60'/0'/0/1",
+			"m/44'/60'/0'/0/2", "m/44'/60'/0'/0/3",
+			"m/44'/60'/0'/0/4", "m/44'/60'/0'/0/5",
+			"m/44'/60'/0'/0/6", "m/44'/60'/0'/0/7",
+			"m/44'/60'/0'/0/8", "m/44'/60'/0'/0/9",
+		})
+
+	testDerive(t, DefaultIterator(LegacyLedgerBaseDerivationPath),
+		[]string{
+			"m/44'/60'/0'/0", "m/44'/60'/0'/1",
+			"m/44'/60'/0'/2", "m/44'/60'/0'/3",
+			"m/44'/60'/0'/4", "m/44'/60'/0'/5",
+			"m/44'/60'/0'/6", "m/44'/60'/0'/7",
+			"m/44'/60'/0'/8", "m/44'/60'/0'/9",
		})
+
+	testDerive(t, LedgerLiveIterator(DefaultBaseDerivationPath),
+		[]string{
+			"m/44'/60'/0'/0/0", "m/44'/60'/1'/0/0",
+			"m/44'/60'/2'/0/0", "m/44'/60'/3'/0/0",
+			"m/44'/60'/4'/0/0", "m/44'/60'/5'/0/0",
+			"m/44'/60'/6'/0/0", "m/44'/60'/7'/0/0",
+			"m/44'/60'/8'/0/0", "m/44'/60'/9'/0/0",
+		})
+}
@@ -262,7 +262,7 @@ func (ac *accountCache) scanAccounts() error {
 		switch {
 		case err != nil:
 			log.Debug("Failed to decode keystore key", "path", path, "err", err)
-		case (addr == common.Address{}):
+		case addr == common.Address{}:
 			log.Debug("Failed to decode keystore key", "path", path, "err", "missing or zero address")
 		default:
 			return &accounts.Account{
@@ -96,7 +96,7 @@ func TestWatchNoDir(t *testing.T) {
 
 	// Create ks but not the directory that it watches.
 	rand.Seed(time.Now().UnixNano())
-	dir := filepath.Join(os.TempDir(), fmt.Sprintf("eth-keystore-watch-test-%d-%d", os.Getpid(), rand.Int()))
+	dir := filepath.Join(os.TempDir(), fmt.Sprintf("eth-keystore-watchnodir-test-%d-%d", os.Getpid(), rand.Int()))
 	ks := NewKeyStore(dir, LightScryptN, LightScryptP)
 
 	list := ks.Accounts()

@@ -322,7 +322,7 @@ func TestUpdatedKeyfileContents(t *testing.T) {
 
 	// Create a temporary kesytore to test with
 	rand.Seed(time.Now().UnixNano())
-	dir := filepath.Join(os.TempDir(), fmt.Sprintf("eth-keystore-watch-test-%d-%d", os.Getpid(), rand.Int()))
+	dir := filepath.Join(os.TempDir(), fmt.Sprintf("eth-keystore-updatedkeyfilecontents-test-%d-%d", os.Getpid(), rand.Int()))
 	ks := NewKeyStore(dir, LightScryptN, LightScryptP)
 
 	list := ks.Accounts()
@@ -32,7 +32,7 @@ import (
 type fileCache struct {
 	all     mapset.Set // Set of all files from the keystore folder
 	lastMod time.Time  // Last time instance when a file was modified
-	mu      sync.RWMutex
+	mu      sync.Mutex
 }
 
 // scan performs a new scan on the given directory, compares against the already
@@ -32,7 +32,7 @@ import (
 	"github.com/ethereum/go-ethereum/accounts"
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/ethereum/go-ethereum/crypto"
-	"github.com/pborman/uuid"
+	"github.com/google/uuid"
 )
 
 const (
@@ -110,7 +110,10 @@ func (k *Key) UnmarshalJSON(j []byte) (err error) {
 	}
 
 	u := new(uuid.UUID)
-	*u = uuid.Parse(keyJSON.Id)
+	*u, err = uuid.Parse(keyJSON.Id)
+	if err != nil {
+		return err
+	}
 	k.Id = *u
 	addr, err := hex.DecodeString(keyJSON.Address)
 	if err != nil {

@@ -128,7 +131,10 @@ func (k *Key) UnmarshalJSON(j []byte) (err error) {
 }
 
 func newKeyFromECDSA(privateKeyECDSA *ecdsa.PrivateKey) *Key {
-	id := uuid.NewRandom()
+	id, err := uuid.NewRandom()
+	if err != nil {
+		panic(fmt.Sprintf("Could not create random uuid: %v", err))
+	}
 	key := &Key{
 		Id:      id,
 		Address: crypto.PubkeyToAddress(privateKeyECDSA.PublicKey),
@@ -24,7 +24,6 @@ import (
 	"crypto/ecdsa"
 	crand "crypto/rand"
 	"errors"
-	"fmt"
 	"math/big"
 	"os"
 	"path/filepath"

@@ -44,6 +43,10 @@ var (
 	ErrLocked  = accounts.NewAuthNeededError("password or unlock")
 	ErrNoMatch = errors.New("no key for given address or file")
 	ErrDecrypt = errors.New("could not decrypt key with given password")
+
+	// ErrAccountAlreadyExists is returned if an account attempted to import is
+	// already present in the keystore.
+	ErrAccountAlreadyExists = errors.New("account already exists")
 )
 
 // KeyStoreType is the reflect type of a keystore backend.
@@ -67,7 +70,8 @@ type KeyStore struct {
 	updateScope event.SubscriptionScope // Subscription scope tracking current live listeners
 	updating    bool                    // Whether the event notification loop is running
 
 	mu       sync.RWMutex
+	importMu sync.Mutex // Import Mutex locks the import to prevent two insertions from racing
 }
 
 type unlocked struct {
@@ -279,11 +283,9 @@ func (ks *KeyStore) SignTx(a accounts.Account, tx *types.Transaction, chainID *b
 	if !found {
 		return nil, ErrLocked
 	}
-	// Depending on the presence of the chain ID, sign with EIP155 or homestead
-	if chainID != nil {
-		return types.SignTx(tx, types.NewEIP155Signer(chainID), unlockedKey.PrivateKey)
-	}
-	return types.SignTx(tx, types.HomesteadSigner{}, unlockedKey.PrivateKey)
+	// Depending on the presence of the chain ID, sign with 2718 or homestead
+	signer := types.LatestSignerForChainID(chainID)
+	return types.SignTx(tx, signer, unlockedKey.PrivateKey)
 }
 
 // SignHashWithPassphrase signs hash if the private key matching the given address

@@ -306,12 +308,9 @@ func (ks *KeyStore) SignTxWithPassphrase(a accounts.Account, passphrase string,
 		return nil, err
 	}
 	defer zeroKey(key.PrivateKey)
-	// Depending on the presence of the chain ID, sign with EIP155 or homestead
-	if chainID != nil {
-		return types.SignTx(tx, types.NewEIP155Signer(chainID), key.PrivateKey)
-	}
-	return types.SignTx(tx, types.HomesteadSigner{}, key.PrivateKey)
+	// Depending on the presence of the chain ID, sign with or without replay protection.
+	signer := types.LatestSignerForChainID(chainID)
+	return types.SignTx(tx, signer, key.PrivateKey)
 }
 
 // Unlock unlocks the given account indefinitely.
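The switch from NewEIP155Signer to LatestSignerForChainID is what lets the keystore sign typed (EIP-2718/EIP-1559) transactions as well as legacy ones, and it keeps the old nil-chain-ID fallback to the Homestead signer. A minimal sketch of the same call pattern outside the keystore, assuming the core/types API as it exists on this branch:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	key, _ := crypto.GenerateKey()
	to := common.HexToAddress("0x0000000000000000000000000000000000000001")

	// LatestSignerForChainID picks the most recent signer for the given chain
	// ID and falls back to the pre-EIP-155 Homestead signer when chainID is nil.
	signer := types.LatestSignerForChainID(big.NewInt(1337))

	// A dynamic-fee transaction, which the old EIP-155 signer could not handle.
	tx := types.NewTx(&types.DynamicFeeTx{
		ChainID:   big.NewInt(1337),
		Nonce:     0,
		GasTipCap: big.NewInt(1),
		GasFeeCap: big.NewInt(100),
		Gas:       21000,
		To:        &to,
		Value:     big.NewInt(0),
	})
	signed, err := types.SignTx(tx, signer, key)
	if err != nil {
		panic(err)
	}
	fmt.Println("signed tx hash:", signed.Hash())
}
```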
@@ -443,14 +442,27 @@ func (ks *KeyStore) Import(keyJSON []byte, passphrase, newPassphrase string) (ac
 	if err != nil {
 		return accounts.Account{}, err
 	}
+	ks.importMu.Lock()
+	defer ks.importMu.Unlock()
+
+	if ks.cache.hasAddress(key.Address) {
+		return accounts.Account{
+			Address: key.Address,
+		}, ErrAccountAlreadyExists
+	}
 	return ks.importKey(key, newPassphrase)
 }
 
 // ImportECDSA stores the given key into the key directory, encrypting it with the passphrase.
 func (ks *KeyStore) ImportECDSA(priv *ecdsa.PrivateKey, passphrase string) (accounts.Account, error) {
+	ks.importMu.Lock()
+	defer ks.importMu.Unlock()
+
 	key := newKeyFromECDSA(priv)
 	if ks.cache.hasAddress(key.Address) {
-		return accounts.Account{}, fmt.Errorf("account already exists")
+		return accounts.Account{
+			Address: key.Address,
+		}, ErrAccountAlreadyExists
 	}
 	return ks.importKey(key, passphrase)
 }
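With the sentinel error in place, callers can distinguish a duplicate import from other failures instead of string-matching. A hedged usage sketch (the keystore directory and passphrase here are illustrative):

```go
package main

import (
	"errors"
	"fmt"

	"github.com/ethereum/go-ethereum/accounts/keystore"
	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	ks := keystore.NewKeyStore("/tmp/example-keystore", keystore.LightScryptN, keystore.LightScryptP)
	key, _ := crypto.GenerateKey()

	if _, err := ks.ImportECDSA(key, "pass"); err != nil {
		fmt.Println("first import failed:", err)
	}
	// Importing the same key again now fails with a well-known error value,
	// and the returned account still carries the duplicate's address.
	acc, err := ks.ImportECDSA(key, "pass")
	if errors.Is(err, keystore.ErrAccountAlreadyExists) {
		fmt.Println("already have", acc.Address)
	}
}
```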
@@ -23,11 +23,14 @@ import (
 	"runtime"
 	"sort"
 	"strings"
+	"sync"
+	"sync/atomic"
 	"testing"
 	"time"
 
 	"github.com/ethereum/go-ethereum/accounts"
 	"github.com/ethereum/go-ethereum/common"
+	"github.com/ethereum/go-ethereum/crypto"
 	"github.com/ethereum/go-ethereum/event"
 )
 
@@ -333,11 +336,95 @@ func TestWalletNotifications(t *testing.T) {
 
 	// Shut down the event collector and check events.
 	sub.Unsubscribe()
-	<-updates
+	for ev := range updates {
+		events = append(events, walletEvent{ev, ev.Wallet.Accounts()[0]})
+	}
 	checkAccounts(t, live, ks.Wallets())
 	checkEvents(t, wantEvents, events)
 }
 
+// TestImportExport tests the import functionality of a keystore.
+func TestImportECDSA(t *testing.T) {
+	dir, ks := tmpKeyStore(t, true)
+	defer os.RemoveAll(dir)
+	key, err := crypto.GenerateKey()
+	if err != nil {
+		t.Fatalf("failed to generate key: %v", key)
+	}
+	if _, err = ks.ImportECDSA(key, "old"); err != nil {
+		t.Errorf("importing failed: %v", err)
+	}
+	if _, err = ks.ImportECDSA(key, "old"); err == nil {
+		t.Errorf("importing same key twice succeeded")
+	}
+	if _, err = ks.ImportECDSA(key, "new"); err == nil {
+		t.Errorf("importing same key twice succeeded")
+	}
+}
+
+// TestImportECDSA tests the import and export functionality of a keystore.
+func TestImportExport(t *testing.T) {
+	dir, ks := tmpKeyStore(t, true)
+	defer os.RemoveAll(dir)
+	acc, err := ks.NewAccount("old")
+	if err != nil {
+		t.Fatalf("failed to create account: %v", acc)
+	}
+	json, err := ks.Export(acc, "old", "new")
+	if err != nil {
+		t.Fatalf("failed to export account: %v", acc)
+	}
+	dir2, ks2 := tmpKeyStore(t, true)
+	defer os.RemoveAll(dir2)
+	if _, err = ks2.Import(json, "old", "old"); err == nil {
+		t.Errorf("importing with invalid password succeeded")
+	}
+	acc2, err := ks2.Import(json, "new", "new")
+	if err != nil {
+		t.Errorf("importing failed: %v", err)
+	}
+	if acc.Address != acc2.Address {
+		t.Error("imported account does not match exported account")
+	}
+	if _, err = ks2.Import(json, "new", "new"); err == nil {
+		t.Errorf("importing a key twice succeeded")
+	}
+
+}
+
+// TestImportRace tests the keystore on races.
+// This test should fail under -race if importing races.
+func TestImportRace(t *testing.T) {
+	dir, ks := tmpKeyStore(t, true)
+	defer os.RemoveAll(dir)
+	acc, err := ks.NewAccount("old")
+	if err != nil {
+		t.Fatalf("failed to create account: %v", acc)
+	}
+	json, err := ks.Export(acc, "old", "new")
+	if err != nil {
+		t.Fatalf("failed to export account: %v", acc)
+	}
+	dir2, ks2 := tmpKeyStore(t, true)
+	defer os.RemoveAll(dir2)
+	var atom uint32
+	var wg sync.WaitGroup
+	wg.Add(2)
+	for i := 0; i < 2; i++ {
+		go func() {
+			defer wg.Done()
+			if _, err := ks2.Import(json, "new", "new"); err != nil {
+				atomic.AddUint32(&atom, 1)
+			}
+
+		}()
+	}
+	wg.Wait()
+	if atom != 1 {
+		t.Errorf("Import is racy")
+	}
+}
+
 // checkAccounts checks that all known live accounts are present in the wallet list.
 func checkAccounts(t *testing.T, live map[common.Address]accounts.Account, wallets []accounts.Wallet) {
 	if len(live) != len(wallets) {
@@ -42,7 +42,7 @@ import (
 	"github.com/ethereum/go-ethereum/common"
 	"github.com/ethereum/go-ethereum/common/math"
 	"github.com/ethereum/go-ethereum/crypto"
-	"github.com/pborman/uuid"
+	"github.com/google/uuid"
 	"golang.org/x/crypto/pbkdf2"
 	"golang.org/x/crypto/scrypt"
 )

@@ -123,6 +123,7 @@ func (ks keyStorePassphrase) StoreKey(filename string, key *Key, auth string) er
 			"Please file a ticket at:\n\n" +
 			"https://github.com/ethereum/go-ethereum/issues." +
 			"The error was : %s"
+		//lint:ignore ST1005 This is a message for the user
 		return fmt.Errorf(msg, tmpName, err)
 	}
 }
@@ -227,9 +228,12 @@ func DecryptKey(keyjson []byte, auth string) (*Key, error) {
 		return nil, err
 	}
 	key := crypto.ToECDSAUnsafe(keyBytes)
+	id, err := uuid.FromBytes(keyId)
+	if err != nil {
+		return nil, err
+	}
 	return &Key{
-		Id:         uuid.UUID(keyId),
+		Id:         id,
 		Address:    crypto.PubkeyToAddress(key.PublicKey),
 		PrivateKey: key,
 	}, nil

@@ -237,7 +241,7 @@ func DecryptKey(keyjson []byte, auth string) (*Key, error) {
 
 func DecryptDataV3(cryptoJson CryptoJSON, auth string) ([]byte, error) {
 	if cryptoJson.Cipher != "aes-128-ctr" {
-		return nil, fmt.Errorf("Cipher not supported: %v", cryptoJson.Cipher)
+		return nil, fmt.Errorf("cipher not supported: %v", cryptoJson.Cipher)
 	}
 	mac, err := hex.DecodeString(cryptoJson.MAC)
 	if err != nil {

@@ -273,9 +277,13 @@ func DecryptDataV3(cryptoJson CryptoJSON, auth string) ([]byte, error) {
 
 func decryptKeyV3(keyProtected *encryptedKeyJSONV3, auth string) (keyBytes []byte, keyId []byte, err error) {
 	if keyProtected.Version != version {
-		return nil, nil, fmt.Errorf("Version not supported: %v", keyProtected.Version)
+		return nil, nil, fmt.Errorf("version not supported: %v", keyProtected.Version)
 	}
-	keyId = uuid.Parse(keyProtected.Id)
+	keyUUID, err := uuid.Parse(keyProtected.Id)
+	if err != nil {
+		return nil, nil, err
+	}
+	keyId = keyUUID[:]
 	plainText, err := DecryptDataV3(keyProtected.Crypto, auth)
 	if err != nil {
 		return nil, nil, err

@@ -284,7 +292,11 @@ func decryptKeyV3(keyProtected *encryptedKeyJSONV3, auth string) (keyBytes []byt
 }
 
 func decryptKeyV1(keyProtected *encryptedKeyJSONV1, auth string) (keyBytes []byte, keyId []byte, err error) {
-	keyId = uuid.Parse(keyProtected.Id)
+	keyUUID, err := uuid.Parse(keyProtected.Id)
+	if err != nil {
+		return nil, nil, err
+	}
+	keyId = keyUUID[:]
 	mac, err := hex.DecodeString(keyProtected.Crypto.MAC)
 	if err != nil {
 		return nil, nil, err

@@ -335,13 +347,13 @@ func getKDFKey(cryptoJSON CryptoJSON, auth string) ([]byte, error) {
 		c := ensureInt(cryptoJSON.KDFParams["c"])
 		prf := cryptoJSON.KDFParams["prf"].(string)
 		if prf != "hmac-sha256" {
-			return nil, fmt.Errorf("Unsupported PBKDF2 PRF: %s", prf)
+			return nil, fmt.Errorf("unsupported PBKDF2 PRF: %s", prf)
 		}
 		key := pbkdf2.Key(authArray, salt, c, dkLen, sha256.New)
 		return key, nil
 	}
 
-	return nil, fmt.Errorf("Unsupported KDF: %s", cryptoJSON.KDF)
+	return nil, fmt.Errorf("unsupported KDF: %s", cryptoJSON.KDF)
 }
 
 // TODO: can we do without this when unmarshalling dynamic JSON?
@@ -27,7 +27,7 @@ import (
 
 	"github.com/ethereum/go-ethereum/accounts"
 	"github.com/ethereum/go-ethereum/crypto"
-	"github.com/pborman/uuid"
+	"github.com/google/uuid"
 	"golang.org/x/crypto/pbkdf2"
 )

@@ -37,7 +37,10 @@ func importPreSaleKey(keyStore keyStore, keyJSON []byte, password string) (accou
 	if err != nil {
 		return accounts.Account{}, nil, err
 	}
-	key.Id = uuid.NewRandom()
+	key.Id, err = uuid.NewRandom()
+	if err != nil {
+		return accounts.Account{}, nil, err
+	}
 	a := accounts.Account{
 		Address: key.Address,
 		URL: accounts.URL{
@@ -86,7 +89,7 @@ func decryptPreSaleKey(fileContent []byte, password string) (key *Key, err error
 	ecKey := crypto.ToECDSAUnsafe(ethPriv)
 
 	key = &Key{
-		Id:         nil,
+		Id:         uuid.UUID{},
 		Address:    crypto.PubkeyToAddress(ecKey.PublicKey),
 		PrivateKey: ecKey,
 	}
@@ -19,7 +19,7 @@ package keystore
 import (
 	"math/big"
 
-	ethereum "github.com/ethereum/go-ethereum"
+	"github.com/ethereum/go-ethereum"
 	"github.com/ethereum/go-ethereum/accounts"
 	"github.com/ethereum/go-ethereum/core/types"
 	"github.com/ethereum/go-ethereum/crypto"

@@ -58,7 +58,7 @@ func (w *keystoreWallet) Open(passphrase string) error { return nil }
 func (w *keystoreWallet) Close() error { return nil }
 
 // Accounts implements accounts.Wallet, returning an account list consisting of
-// a single account that the plain kestore wallet contains.
+// a single account that the plain keystore wallet contains.
 func (w *keystoreWallet) Accounts() []accounts.Account {
 	return []accounts.Account{w.account}
 }

@@ -93,12 +93,12 @@ func (w *keystoreWallet) signHash(account accounts.Account, hash []byte) ([]byte
 	return w.keystore.SignHash(account, hash)
 }
 
-// SignData signs keccak256(data). The mimetype parameter describes the type of data being signed
+// SignData signs keccak256(data). The mimetype parameter describes the type of data being signed.
 func (w *keystoreWallet) SignData(account accounts.Account, mimeType string, data []byte) ([]byte, error) {
 	return w.signHash(account, crypto.Keccak256(data))
 }
 
-// SignDataWithPassphrase signs keccak256(data). The mimetype parameter describes the type of data being signed
+// SignDataWithPassphrase signs keccak256(data). The mimetype parameter describes the type of data being signed.
 func (w *keystoreWallet) SignDataWithPassphrase(account accounts.Account, passphrase, mimeType string, data []byte) ([]byte, error) {
 	// Make sure the requested account is contained within
 	if !w.Contains(account) {
@@ -108,12 +108,14 @@ func (w *keystoreWallet) SignDataWithPassphrase(account accounts.Account, passph
 	return w.keystore.SignHashWithPassphrase(account, passphrase, crypto.Keccak256(data))
 }
 
+// SignText implements accounts.Wallet, attempting to sign the hash of
+// the given text with the given account.
 func (w *keystoreWallet) SignText(account accounts.Account, text []byte) ([]byte, error) {
 	return w.signHash(account, accounts.TextHash(text))
 }
 
 // SignTextWithPassphrase implements accounts.Wallet, attempting to sign the
-// given hash with the given account using passphrase as extra authentication.
+// hash of the given text with the given account using passphrase as extra authentication.
 func (w *keystoreWallet) SignTextWithPassphrase(account accounts.Account, passphrase string, text []byte) ([]byte, error) {
 	// Make sure the requested account is contained within
 	if !w.Contains(account) {
@@ -14,6 +14,7 @@
 // You should have received a copy of the GNU Lesser General Public License
 // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
 
+//go:build (darwin && !ios && cgo) || freebsd || (linux && !arm64) || netbsd || solaris
 // +build darwin,!ios,cgo freebsd linux,!arm64 netbsd solaris
 
 package keystore

@@ -14,6 +14,7 @@
 // You should have received a copy of the GNU Lesser General Public License
 // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
 
+//go:build (darwin && !cgo) || ios || (linux && arm64) || windows || (!darwin && !freebsd && !linux && !netbsd && !solaris)
 // +build darwin,!cgo ios linux,arm64 windows !darwin,!freebsd,!linux,!netbsd,!solaris
 
 // This is the fallback implementation of directory watching.
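These two hunks only add the newer //go:build form next to the legacy // +build comment; the two lines must describe the same constraint. A generic example of the paired form, not taken from this diff:

```go
// Copyright notice elided for brevity.

//go:build linux && !arm64
// +build linux,!arm64

// Package example shows the paired build-constraint comments: the //go:build
// line is the Go 1.17+ syntax, the // +build line keeps older toolchains
// happy, and gofmt keeps the two in sync.
package example
```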
@@ -25,6 +25,10 @@ import (
 	"github.com/ethereum/go-ethereum/event"
 )
 
+// managerSubBufferSize determines how many incoming wallet events
+// the manager will buffer in its channel.
+const managerSubBufferSize = 50
+
 // Config contains the settings of the global account manager.
 //
 // TODO(rjl493456442, karalabe, holiman): Get rid of this when account management

@@ -33,18 +37,27 @@ type Config struct {
 	InsecureUnlockAllowed bool // Whether account unlocking in insecure environment is allowed
 }
 
+// newBackendEvent lets the manager know it should
+// track the given backend for wallet updates.
+type newBackendEvent struct {
+	backend   Backend
+	processed chan struct{} // Informs event emitter that backend has been integrated
+}
+
 // Manager is an overarching account manager that can communicate with various
 // backends for signing transactions.
 type Manager struct {
 	config   *Config                    // Global account manager configurations
 	backends map[reflect.Type][]Backend // Index of backends currently registered
 	updaters []event.Subscription       // Wallet update subscriptions for all backends
 	updates  chan WalletEvent           // Subscription sink for backend wallet changes
-	wallets  []Wallet                   // Cache of all wallets from all registered backends
+	newBackends chan newBackendEvent    // Incoming backends to be tracked by the manager
+	wallets     []Wallet                // Cache of all wallets from all registered backends
 
 	feed event.Feed // Wallet feed notifying of arrivals/departures
 
 	quit chan chan error
+	term chan struct{} // Channel is closed upon termination of the update loop
 	lock sync.RWMutex
 }
@@ -57,7 +70,7 @@ func NewManager(config *Config, backends ...Backend) *Manager {
 		wallets = merge(wallets, backend.Wallets()...)
 	}
 	// Subscribe to wallet notifications from all backends
-	updates := make(chan WalletEvent, 4*len(backends))
+	updates := make(chan WalletEvent, managerSubBufferSize)
 
 	subs := make([]event.Subscription, len(backends))
 	for i, backend := range backends {

@@ -65,12 +78,14 @@ func NewManager(config *Config, backends ...Backend) *Manager {
 	}
 	// Assemble the account manager and return
 	am := &Manager{
 		config:   config,
 		backends: make(map[reflect.Type][]Backend),
 		updaters: subs,
 		updates:  updates,
-		wallets:  wallets,
-		quit:     make(chan chan error),
+		newBackends: make(chan newBackendEvent),
+		wallets:     wallets,
+		quit:        make(chan chan error),
+		term:        make(chan struct{}),
 	}
 	for _, backend := range backends {
 		kind := reflect.TypeOf(backend)
@@ -93,6 +108,14 @@ func (am *Manager) Config() *Config {
 	return am.config
 }
 
+// AddBackend starts the tracking of an additional backend for wallet updates.
+// cmd/geth assumes once this func returns the backends have been already integrated.
+func (am *Manager) AddBackend(backend Backend) {
+	done := make(chan struct{})
+	am.newBackends <- newBackendEvent{backend, done}
+	<-done
+}
+
 // update is the wallet event loop listening for notifications from the backends
 // and updating the cache of wallets.
 func (am *Manager) update() {
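The AddBackend/newBackendEvent pair is a small synchronous handshake with the update loop: the caller hands the loop a request plus a channel and blocks until the loop closes it, so the backend is guaranteed to be integrated when AddBackend returns. A stripped-down sketch of the same pattern, with purely illustrative names rather than the manager's:

```go
package main

import "fmt"

type request struct {
	payload   string
	processed chan struct{} // closed by the loop once the payload is integrated
}

func main() {
	requests := make(chan request)
	quit := make(chan struct{})

	// The event loop owns the state; nobody else mutates it.
	go func() {
		var state []string
		for {
			select {
			case req := <-requests:
				state = append(state, req.payload)
				fmt.Println("state is now", state)
				close(req.processed) // unblock the sender
			case <-quit:
				return
			}
		}
	}()

	// Caller side: send the request, then wait for the loop to acknowledge,
	// mirroring what AddBackend does with newBackendEvent.
	done := make(chan struct{})
	requests <- request{payload: "new backend", processed: done}
	<-done
	close(quit)
}
```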
@@ -122,10 +145,22 @@ func (am *Manager) update() {
 
 			// Notify any listeners of the event
 			am.feed.Send(event)
+		case event := <-am.newBackends:
+			am.lock.Lock()
+			// Update caches
+			backend := event.backend
+			am.wallets = merge(am.wallets, backend.Wallets()...)
+			am.updaters = append(am.updaters, backend.Subscribe(am.updates))
+			kind := reflect.TypeOf(backend)
+			am.backends[kind] = append(am.backends[kind], backend)
+			am.lock.Unlock()
+			close(event.processed)
 		case errc := <-am.quit:
 			// Manager terminating, return
 			errc <- nil
+			// Signals event emitters the loop is not receiving values
+			// to prevent them from getting stuck.
+			close(am.term)
 			return
 		}
 	}
@@ -133,6 +168,9 @@ func (am *Manager) update() {
 
 // Backends retrieves the backend(s) with the given type from the account manager.
 func (am *Manager) Backends(kind reflect.Type) []Backend {
+	am.lock.RLock()
+	defer am.lock.RUnlock()
+
 	return am.backends[kind]
 }
 
@@ -141,6 +179,11 @@ func (am *Manager) Wallets() []Wallet {
 	am.lock.RLock()
 	defer am.lock.RUnlock()
 
+	return am.walletsNoLock()
+}
+
+// walletsNoLock returns all registered wallets. Callers must hold am.lock.
+func (am *Manager) walletsNoLock() []Wallet {
 	cpy := make([]Wallet, len(am.wallets))
 	copy(cpy, am.wallets)
 	return cpy
@@ -155,7 +198,7 @@ func (am *Manager) Wallet(url string) (Wallet, error) {
 	if err != nil {
 		return nil, err
 	}
-	for _, wallet := range am.Wallets() {
+	for _, wallet := range am.walletsNoLock() {
 		if wallet.URL() == parsed {
 			return wallet, nil
 		}
@@ -31,12 +31,16 @@
 Write down the URL (`keycard://044def09` in this example). Then ask `geth` to open the wallet:
 
 ```
-> personal.openWallet("keycard://044def09")
-Please enter the pairing password:
+> personal.openWallet("keycard://044def09", "pairing password")
 ```
 
-Enter the pairing password that you have received during card initialization. Same with the PIN that you will subsequently be
-asked for.
+The pairing password has been generated during the card initialization process.
+
+The process needs to be repeated once more with the PIN:
+
+```
+> personal.openWallet("keycard://044def09", "PIN number")
+```
 
 If everything goes well, you should see your new account when typing `personal` on the console:
 
@@ -220,7 +220,7 @@ func (hub *Hub) refreshWallets() {
 		// Mark the reader as present
 		seen[reader] = struct{}{}
 
-		// If we alreay know about this card, skip to the next reader, otherwise clean up
+		// If we already know about this card, skip to the next reader, otherwise clean up
 		if wallet, ok := hub.wallets[reader]; ok {
 			if err := wallet.ping(); err == nil {
 				continue
@@ -20,6 +20,7 @@ import (
 	"bytes"
 	"crypto/aes"
 	"crypto/cipher"
+	"crypto/elliptic"
 	"crypto/rand"
 	"crypto/sha256"
 	"crypto/sha512"

@@ -27,7 +28,6 @@ import (
 
 	"github.com/ethereum/go-ethereum/crypto"
 	pcsc "github.com/gballet/go-libpcsclite"
-	"github.com/wsddn/go-ecdh"
 	"golang.org/x/crypto/pbkdf2"
 	"golang.org/x/text/unicode/norm"
 )
@@ -63,26 +63,19 @@ type SecureChannelSession struct {
 // NewSecureChannelSession creates a new secure channel for the given card and public key.
 func NewSecureChannelSession(card *pcsc.Card, keyData []byte) (*SecureChannelSession, error) {
 	// Generate an ECDSA keypair for ourselves
-	gen := ecdh.NewEllipticECDH(crypto.S256())
-	private, public, err := gen.GenerateKey(rand.Reader)
+	key, err := crypto.GenerateKey()
 	if err != nil {
 		return nil, err
 	}
-
-	cardPublic, ok := gen.Unmarshal(keyData)
-	if !ok {
-		return nil, fmt.Errorf("Could not unmarshal public key from card")
-	}
-
-	secret, err := gen.GenerateSharedSecret(private, cardPublic)
-	if err != nil {
-		return nil, err
-	}
-
+	cardPublic, err := crypto.UnmarshalPubkey(keyData)
+	if err != nil {
+		return nil, fmt.Errorf("could not unmarshal public key from card: %v", err)
+	}
+	secret, _ := key.Curve.ScalarMult(cardPublic.X, cardPublic.Y, key.D.Bytes())
 	return &SecureChannelSession{
 		card:      card,
-		secret:    secret,
-		publicKey: gen.Marshal(public),
+		secret:    secret.Bytes(),
+		publicKey: elliptic.Marshal(crypto.S256(), key.PublicKey.X, key.PublicKey.Y),
 	}, nil
 }
 
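The go-ecdh dependency is replaced by doing the Diffie-Hellman step directly on secp256k1: each side multiplies the peer's public point by its own private scalar, and both arrive at the same X coordinate. A small sanity-check sketch of that property using the same crypto helpers (not part of the change itself):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	a, _ := crypto.GenerateKey()
	b, _ := crypto.GenerateKey()

	// Shared secret as computed on each side: the peer's public point times
	// the local private scalar; only the X coordinate is used.
	abX, _ := a.Curve.ScalarMult(b.PublicKey.X, b.PublicKey.Y, a.D.Bytes())
	baX, _ := b.Curve.ScalarMult(a.PublicKey.X, a.PublicKey.Y, b.D.Bytes())

	fmt.Println("secrets match:", bytes.Equal(abX.Bytes(), baX.Bytes()))
}
```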
@@ -109,7 +102,7 @@ func (s *SecureChannelSession) Pair(pairingPassword []byte) error {
 	cardChallenge := response.Data[32:64]
 
 	if !bytes.Equal(expectedCryptogram, cardCryptogram) {
-		return fmt.Errorf("Invalid card cryptogram %v != %v", expectedCryptogram, cardCryptogram)
+		return fmt.Errorf("invalid card cryptogram %v != %v", expectedCryptogram, cardCryptogram)
 	}
 
 	md.Reset()

@@ -132,7 +125,7 @@ func (s *SecureChannelSession) Pair(pairingPassword []byte) error {
 // Unpair disestablishes an existing pairing.
 func (s *SecureChannelSession) Unpair() error {
 	if s.PairingKey == nil {
-		return fmt.Errorf("Cannot unpair: not paired")
+		return fmt.Errorf("cannot unpair: not paired")
 	}
 
 	_, err := s.transmitEncrypted(claSCWallet, insUnpair, s.PairingIndex, 0, []byte{})

@@ -148,7 +141,7 @@ func (s *SecureChannelSession) Unpair() error {
 // Open initializes the secure channel.
 func (s *SecureChannelSession) Open() error {
 	if s.iv != nil {
-		return fmt.Errorf("Session already opened")
+		return fmt.Errorf("session already opened")
 	}
 
 	response, err := s.open()

@@ -185,11 +178,11 @@ func (s *SecureChannelSession) mutuallyAuthenticate() error {
 		return err
 	}
 	if response.Sw1 != 0x90 || response.Sw2 != 0x00 {
-		return fmt.Errorf("Got unexpected response from MUTUALLY_AUTHENTICATE: 0x%x%x", response.Sw1, response.Sw2)
+		return fmt.Errorf("got unexpected response from MUTUALLY_AUTHENTICATE: 0x%x%x", response.Sw1, response.Sw2)
 	}
 
 	if len(response.Data) != scSecretLength {
-		return fmt.Errorf("Response from MUTUALLY_AUTHENTICATE was %d bytes, expected %d", len(response.Data), scSecretLength)
+		return fmt.Errorf("response from MUTUALLY_AUTHENTICATE was %d bytes, expected %d", len(response.Data), scSecretLength)
 	}
 
 	return nil

@@ -222,7 +215,7 @@ func (s *SecureChannelSession) pair(p1 uint8, data []byte) (*responseAPDU, error
 // transmitEncrypted sends an encrypted message, and decrypts and returns the response.
 func (s *SecureChannelSession) transmitEncrypted(cla, ins, p1, p2 byte, data []byte) (*responseAPDU, error) {
 	if s.iv == nil {
-		return nil, fmt.Errorf("Channel not open")
+		return nil, fmt.Errorf("channel not open")
 	}
 
 	data, err := s.encryptAPDU(data)

@@ -261,14 +254,14 @@ func (s *SecureChannelSession) transmitEncrypted(cla, ins, p1, p2 byte, data []b
 		return nil, err
 	}
 	if !bytes.Equal(s.iv, rmac) {
-		return nil, fmt.Errorf("Invalid MAC in response")
+		return nil, fmt.Errorf("invalid MAC in response")
 	}
 
 	rapdu := &responseAPDU{}
 	rapdu.deserialize(plainData)
 
 	if rapdu.Sw1 != sw1Ok {
-		return nil, fmt.Errorf("Unexpected response status Cla=0x%x, Ins=0x%x, Sw=0x%x%x", cla, ins, rapdu.Sw1, rapdu.Sw2)
+		return nil, fmt.Errorf("unexpected response status Cla=0x%x, Ins=0x%x, Sw=0x%x%x", cla, ins, rapdu.Sw1, rapdu.Sw2)
 	}
 
 	return rapdu, nil

@@ -277,7 +270,7 @@ func (s *SecureChannelSession) transmitEncrypted(cla, ins, p1, p2 byte, data []b
 // encryptAPDU is an internal method that serializes and encrypts an APDU.
 func (s *SecureChannelSession) encryptAPDU(data []byte) ([]byte, error) {
 	if len(data) > maxPayloadSize {
-		return nil, fmt.Errorf("Payload of %d bytes exceeds maximum of %d", len(data), maxPayloadSize)
+		return nil, fmt.Errorf("payload of %d bytes exceeds maximum of %d", len(data), maxPayloadSize)
 	}
 	data = pad(data, 0x80)
 

@@ -323,10 +316,10 @@ func unpad(data []byte, terminator byte) ([]byte, error) {
 		case terminator:
 			return data[:len(data)-i], nil
 		default:
-			return nil, fmt.Errorf("Expected end of padding, got %d", data[len(data)-i])
+			return nil, fmt.Errorf("expected end of padding, got %d", data[len(data)-i])
 		}
 	}
-	return nil, fmt.Errorf("Expected end of padding, got 0")
+	return nil, fmt.Errorf("expected end of padding, got 0")
 }
 
 // updateIV is an internal method that updates the initialization vector after
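All of the error-message hunks above are the same mechanical fix: staticcheck's ST1005 wants Go error strings to start lowercase and carry no trailing punctuation, because they usually end up wrapped inside longer messages. A generic two-value illustration, not taken from this code:

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	// Discouraged (what the old strings looked like): capitalized message.
	bad := errors.New("Channel not open")
	// Preferred (what the diff changes them to): lowercase, no punctuation,
	// so the message reads naturally once a caller wraps it.
	good := errors.New("channel not open")

	fmt.Println(fmt.Errorf("pairing failed: %w", bad))
	fmt.Println(fmt.Errorf("pairing failed: %w", good))
}
```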
|
@ -33,7 +33,7 @@ import (
|
|||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
ethereum "github.com/ethereum/go-ethereum"
|
"github.com/ethereum/go-ethereum"
|
||||||
"github.com/ethereum/go-ethereum/accounts"
|
"github.com/ethereum/go-ethereum/accounts"
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
@ -167,7 +167,7 @@ func transmit(card *pcsc.Card, command *commandAPDU) (*responseAPDU, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if response.Sw1 != sw1Ok {
|
if response.Sw1 != sw1Ok {
|
||||||
return nil, fmt.Errorf("Unexpected insecure response status Cla=0x%x, Ins=0x%x, Sw=0x%x%x", command.Cla, command.Ins, response.Sw1, response.Sw2)
|
return nil, fmt.Errorf("unexpected insecure response status Cla=0x%x, Ins=0x%x, Sw=0x%x%x", command.Cla, command.Ins, response.Sw1, response.Sw2)
|
||||||
}
|
}
|
||||||
|
|
||||||
return response, nil
|
return response, nil
|
||||||
@ -252,7 +252,7 @@ func (w *Wallet) release() error {
|
|||||||
// with the wallet.
|
// with the wallet.
|
||||||
func (w *Wallet) pair(puk []byte) error {
|
func (w *Wallet) pair(puk []byte) error {
|
||||||
if w.session.paired() {
|
if w.session.paired() {
|
||||||
return fmt.Errorf("Wallet already paired")
|
return fmt.Errorf("wallet already paired")
|
||||||
}
|
}
|
||||||
pairing, err := w.session.pair(puk)
|
pairing, err := w.session.pair(puk)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -312,15 +312,15 @@ func (w *Wallet) Status() (string, error) {
|
|||||||
}
|
}
|
||||||
switch {
|
switch {
|
||||||
case !w.session.verified && status.PinRetryCount == 0 && status.PukRetryCount == 0:
|
case !w.session.verified && status.PinRetryCount == 0 && status.PukRetryCount == 0:
|
||||||
return fmt.Sprintf("Bricked, waiting for full wipe"), nil
|
return "Bricked, waiting for full wipe", nil
|
||||||
case !w.session.verified && status.PinRetryCount == 0:
|
case !w.session.verified && status.PinRetryCount == 0:
|
||||||
return fmt.Sprintf("Blocked, waiting for PUK (%d attempts left) and new PIN", status.PukRetryCount), nil
|
return fmt.Sprintf("Blocked, waiting for PUK (%d attempts left) and new PIN", status.PukRetryCount), nil
|
||||||
case !w.session.verified:
|
case !w.session.verified:
|
||||||
return fmt.Sprintf("Locked, waiting for PIN (%d attempts left)", status.PinRetryCount), nil
|
return fmt.Sprintf("Locked, waiting for PIN (%d attempts left)", status.PinRetryCount), nil
|
||||||
case !status.Initialized:
|
case !status.Initialized:
|
||||||
return fmt.Sprintf("Empty, waiting for initialization"), nil
|
return "Empty, waiting for initialization", nil
|
||||||
default:
|
default:
|
||||||
return fmt.Sprintf("Online"), nil
|
return "Online", nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -362,7 +362,7 @@ func (w *Wallet) Open(passphrase string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
// Pairing succeeded, fall through to PIN checks. This will of course fail,
|
// Pairing succeeded, fall through to PIN checks. This will of course fail,
|
||||||
// but we can't return ErrPINNeeded directly here becase we don't know whether
|
// but we can't return ErrPINNeeded directly here because we don't know whether
|
||||||
// a PIN check or a PIN reset is needed.
|
// a PIN check or a PIN reset is needed.
|
||||||
passphrase = ""
|
passphrase = ""
|
||||||
}
|
}
|
||||||
@ -637,7 +637,7 @@ func (w *Wallet) Derive(path accounts.DerivationPath, pin bool) (accounts.Accoun
|
|||||||
// to discover non zero accounts and automatically add them to list of tracked
|
// to discover non zero accounts and automatically add them to list of tracked
|
||||||
// accounts.
|
// accounts.
|
||||||
//
|
//
|
||||||
// Note, self derivaton will increment the last component of the specified path
|
// Note, self derivation will increment the last component of the specified path
|
||||||
// opposed to decending into a child path to allow discovering accounts starting
|
// opposed to decending into a child path to allow discovering accounts starting
|
||||||
// from non zero components.
|
// from non zero components.
|
||||||
//
|
//
|
||||||
@ -699,7 +699,7 @@ func (w *Wallet) signHash(account accounts.Account, hash []byte) ([]byte, error)
|
|||||||
// the needed details via SignTxWithPassphrase, or by other means (e.g. unlock
|
// the needed details via SignTxWithPassphrase, or by other means (e.g. unlock
|
||||||
// the account in a keystore).
|
// the account in a keystore).
|
||||||
func (w *Wallet) SignTx(account accounts.Account, tx *types.Transaction, chainID *big.Int) (*types.Transaction, error) {
|
func (w *Wallet) SignTx(account accounts.Account, tx *types.Transaction, chainID *big.Int) (*types.Transaction, error) {
|
||||||
signer := types.NewEIP155Signer(chainID)
|
signer := types.LatestSignerForChainID(chainID)
|
||||||
hash := signer.Hash(tx)
|
hash := signer.Hash(tx)
|
||||||
sig, err := w.signHash(account, hash[:])
|
sig, err := w.signHash(account, hash[:])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -773,12 +773,12 @@ func (w *Wallet) findAccountPath(account accounts.Account) (accounts.DerivationP
|
|||||||
|
|
||||||
// Look for the path in the URL
|
// Look for the path in the URL
|
||||||
if account.URL.Scheme != w.Hub.scheme {
|
if account.URL.Scheme != w.Hub.scheme {
|
||||||
return nil, fmt.Errorf("Scheme %s does not match wallet scheme %s", account.URL.Scheme, w.Hub.scheme)
|
return nil, fmt.Errorf("scheme %s does not match wallet scheme %s", account.URL.Scheme, w.Hub.scheme)
|
||||||
}
|
}
|
||||||
|
|
||||||
parts := strings.SplitN(account.URL.Path, "/", 2)
|
parts := strings.SplitN(account.URL.Path, "/", 2)
|
||||||
if len(parts) != 2 {
|
if len(parts) != 2 {
|
||||||
return nil, fmt.Errorf("Invalid URL format: %s", account.URL)
|
return nil, fmt.Errorf("invalid URL format: %s", account.URL)
|
||||||
}
|
}
|
||||||
|
|
||||||
if parts[0] != fmt.Sprintf("%x", w.PublicKey[1:3]) {
|
if parts[0] != fmt.Sprintf("%x", w.PublicKey[1:3]) {
|
||||||
@@ -813,7 +813,7 @@ func (s *Session) pair(secret []byte) (smartcardPairing, error) {
 // unpair deletes an existing pairing.
 func (s *Session) unpair() error {
 	if !s.verified {
-		return fmt.Errorf("Unpair requires that the PIN be verified")
+		return fmt.Errorf("unpair requires that the PIN be verified")
 	}
 	return s.Channel.Unpair()
 }

@@ -850,7 +850,7 @@ func (s *Session) paired() bool {
 // authenticate uses an existing pairing to establish a secure channel.
 func (s *Session) authenticate(pairing smartcardPairing) error {
 	if !bytes.Equal(s.Wallet.PublicKey, pairing.PublicKey) {
-		return fmt.Errorf("Cannot pair using another wallet's pairing; %x != %x", s.Wallet.PublicKey, pairing.PublicKey)
+		return fmt.Errorf("cannot pair using another wallet's pairing; %x != %x", s.Wallet.PublicKey, pairing.PublicKey)
 	}
 	s.Channel.PairingKey = pairing.PairingKey
 	s.Channel.PairingIndex = pairing.PairingIndex

@@ -879,6 +879,7 @@ func (s *Session) walletStatus() (*walletStatus, error) {
 }
 
 // derivationPath fetches the wallet's current derivation path from the card.
+//lint:ignore U1000 needs to be added to the console interface
 func (s *Session) derivationPath() (accounts.DerivationPath, error) {
 	response, err := s.Channel.transmitEncrypted(claSCWallet, insStatus, statusP1Path, 0, nil)
 	if err != nil {
@@ -993,12 +994,14 @@ func (s *Session) derive(path accounts.DerivationPath) (accounts.Account, error)
 }
 
 // keyExport contains information on an exported keypair.
+//lint:ignore U1000 needs to be added to the console interface
 type keyExport struct {
 	PublicKey  []byte `asn1:"tag:0"`
 	PrivateKey []byte `asn1:"tag:1,optional"`
 }
 
 // publicKey returns the public key for the current derivation path.
+//lint:ignore U1000 needs to be added to the console interface
 func (s *Session) publicKey() ([]byte, error) {
 	response, err := s.Channel.transmitEncrypted(claSCWallet, insExportKey, exportP1Any, exportP2Pubkey, nil)
 	if err != nil {
|
@ -64,7 +64,7 @@ func (u URL) String() string {
|
|||||||
func (u URL) TerminalString() string {
|
func (u URL) TerminalString() string {
|
||||||
url := u.String()
|
url := u.String()
|
||||||
if len(url) > 32 {
|
if len(url) > 32 {
|
||||||
return url[:31] + "…"
|
return url[:31] + ".."
|
||||||
}
|
}
|
||||||
return url
|
return url
|
||||||
}
|
}
|
||||||
|
@ -52,8 +52,10 @@ const (
|
|||||||
ledgerOpRetrieveAddress ledgerOpcode = 0x02 // Returns the public key and Ethereum address for a given BIP 32 path
|
ledgerOpRetrieveAddress ledgerOpcode = 0x02 // Returns the public key and Ethereum address for a given BIP 32 path
|
||||||
ledgerOpSignTransaction ledgerOpcode = 0x04 // Signs an Ethereum transaction after having the user validate the parameters
|
ledgerOpSignTransaction ledgerOpcode = 0x04 // Signs an Ethereum transaction after having the user validate the parameters
|
||||||
ledgerOpGetConfiguration ledgerOpcode = 0x06 // Returns specific wallet application configuration
|
ledgerOpGetConfiguration ledgerOpcode = 0x06 // Returns specific wallet application configuration
|
||||||
|
ledgerOpSignTypedMessage ledgerOpcode = 0x0c // Signs an Ethereum message following the EIP 712 specification
|
||||||
|
|
||||||
ledgerP1DirectlyFetchAddress ledgerParam1 = 0x00 // Return address directly from the wallet
|
ledgerP1DirectlyFetchAddress ledgerParam1 = 0x00 // Return address directly from the wallet
|
||||||
|
ledgerP1InitTypedMessageData ledgerParam1 = 0x00 // First chunk of Typed Message data
|
||||||
ledgerP1InitTransactionData ledgerParam1 = 0x00 // First transaction data block for signing
|
ledgerP1InitTransactionData ledgerParam1 = 0x00 // First transaction data block for signing
|
||||||
ledgerP1ContTransactionData ledgerParam1 = 0x80 // Subsequent transaction data block for signing
|
ledgerP1ContTransactionData ledgerParam1 = 0x80 // Subsequent transaction data block for signing
|
||||||
ledgerP2DiscardAddressChainCode ledgerParam2 = 0x00 // Do not return the chain code along with the address
|
ledgerP2DiscardAddressChainCode ledgerParam2 = 0x00 // Do not return the chain code along with the address
|
||||||
@ -163,12 +165,31 @@ func (w *ledgerDriver) SignTx(path accounts.DerivationPath, tx *types.Transactio
|
|||||||
}
|
}
|
||||||
// Ensure the wallet is capable of signing the given transaction
|
// Ensure the wallet is capable of signing the given transaction
|
||||||
if chainID != nil && w.version[0] <= 1 && w.version[1] <= 0 && w.version[2] <= 2 {
|
if chainID != nil && w.version[0] <= 1 && w.version[1] <= 0 && w.version[2] <= 2 {
|
||||||
|
//lint:ignore ST1005 brand name displayed on the console
|
||||||
return common.Address{}, nil, fmt.Errorf("Ledger v%d.%d.%d doesn't support signing this transaction, please update to v1.0.3 at least", w.version[0], w.version[1], w.version[2])
|
return common.Address{}, nil, fmt.Errorf("Ledger v%d.%d.%d doesn't support signing this transaction, please update to v1.0.3 at least", w.version[0], w.version[1], w.version[2])
|
||||||
}
|
}
|
||||||
// All infos gathered and metadata checks out, request signing
|
// All infos gathered and metadata checks out, request signing
|
||||||
return w.ledgerSign(path, tx, chainID)
|
return w.ledgerSign(path, tx, chainID)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SignTypedMessage implements usbwallet.driver, sending the message to the Ledger and
|
||||||
|
// waiting for the user to sign or deny the transaction.
|
||||||
|
//
|
||||||
|
// Note: this was introduced in the ledger 1.5.0 firmware
|
||||||
|
func (w *ledgerDriver) SignTypedMessage(path accounts.DerivationPath, domainHash []byte, messageHash []byte) ([]byte, error) {
|
||||||
|
// If the Ethereum app doesn't run, abort
|
||||||
|
if w.offline() {
|
||||||
|
return nil, accounts.ErrWalletClosed
|
||||||
|
}
|
||||||
|
// Ensure the wallet is capable of signing the given transaction
|
||||||
|
if w.version[0] < 1 && w.version[1] < 5 {
|
||||||
|
//lint:ignore ST1005 brand name displayed on the console
|
||||||
|
return nil, fmt.Errorf("Ledger version >= 1.5.0 required for EIP-712 signing (found version v%d.%d.%d)", w.version[0], w.version[1], w.version[2])
|
||||||
|
}
|
||||||
|
// All infos gathered and metadata checks out, request signing
|
||||||
|
return w.ledgerSignTypedMessage(path, domainHash, messageHash)
|
||||||
|
}
|
||||||
|
|
||||||
// ledgerVersion retrieves the current version of the Ethereum wallet app running
|
// ledgerVersion retrieves the current version of the Ethereum wallet app running
|
||||||
// on the Ledger wallet.
|
// on the Ledger wallet.
|
||||||
//
|
//
|
||||||
@ -366,6 +387,68 @@ func (w *ledgerDriver) ledgerSign(derivationPath []uint32, tx *types.Transaction
|
|||||||
return sender, signed, nil
|
return sender, signed, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ledgerSignTypedMessage sends the transaction to the Ledger wallet, and waits for the user
|
||||||
|
// to confirm or deny the transaction.
|
||||||
|
//
|
||||||
|
// The signing protocol is defined as follows:
|
||||||
|
//
|
||||||
|
// CLA | INS | P1 | P2 | Lc | Le
|
||||||
|
// ----+-----+----+-----------------------------+-----+---
|
||||||
|
// E0 | 0C | 00 | implementation version : 00 | variable | variable
|
||||||
|
//
|
||||||
|
// Where the input is:
|
||||||
|
//
|
||||||
|
// Description | Length
|
||||||
|
// -------------------------------------------------+----------
|
||||||
|
// Number of BIP 32 derivations to perform (max 10) | 1 byte
|
||||||
|
// First derivation index (big endian) | 4 bytes
|
||||||
|
// ... | 4 bytes
|
||||||
|
// Last derivation index (big endian) | 4 bytes
|
||||||
|
// domain hash | 32 bytes
|
||||||
|
// message hash | 32 bytes
|
||||||
|
//
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// And the output data is:
|
||||||
|
//
|
||||||
|
// Description | Length
|
||||||
|
// ------------+---------
|
||||||
|
// signature V | 1 byte
|
||||||
|
// signature R | 32 bytes
|
||||||
|
// signature S | 32 bytes
|
||||||
|
func (w *ledgerDriver) ledgerSignTypedMessage(derivationPath []uint32, domainHash []byte, messageHash []byte) ([]byte, error) {
|
||||||
|
// Flatten the derivation path into the Ledger request
|
||||||
|
path := make([]byte, 1+4*len(derivationPath))
|
||||||
|
path[0] = byte(len(derivationPath))
|
||||||
|
for i, component := range derivationPath {
|
||||||
|
binary.BigEndian.PutUint32(path[1+4*i:], component)
|
||||||
|
}
|
||||||
|
// Create the 712 message
|
||||||
|
payload := append(path, domainHash...)
|
||||||
|
payload = append(payload, messageHash...)
|
||||||
|
|
||||||
|
// Send the request and wait for the response
|
||||||
|
var (
|
||||||
|
op = ledgerP1InitTypedMessageData
|
||||||
|
reply []byte
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
// Send the message over, ensuring it's processed correctly
|
||||||
|
reply, err = w.ledgerExchange(ledgerOpSignTypedMessage, op, 0, payload)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract the Ethereum signature and do a sanity validation
|
||||||
|
if len(reply) != crypto.SignatureLength {
|
||||||
|
return nil, errors.New("reply lacks signature")
|
||||||
|
}
|
||||||
|
signature := append(reply[1:], reply[0])
|
||||||
|
return signature, nil
|
||||||
|
}
|
||||||
|
|
||||||
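To make the request layout documented above concrete, here is a minimal standalone Go sketch (not part of the driver) that assembles the same EIP-712 payload: one length byte, the big-endian BIP-32 components, then the two 32-byte hashes. It also reorders the device's V, R, S reply into the R, S, V form used by the surrounding code. Function and variable names are illustrative, not go-ethereum API.

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// buildTypedMessagePayload mirrors the request layout documented above:
// one byte for the number of BIP-32 components, each component as a
// big-endian uint32, then the 32-byte domain hash and 32-byte message hash.
func buildTypedMessagePayload(derivationPath []uint32, domainHash, messageHash []byte) []byte {
	path := make([]byte, 1+4*len(derivationPath))
	path[0] = byte(len(derivationPath))
	for i, component := range derivationPath {
		binary.BigEndian.PutUint32(path[1+4*i:], component)
	}
	payload := append(path, domainHash...)
	return append(payload, messageHash...)
}

// toRSV reorders a reply of the form V || R || S into the R || S || V
// layout, matching what ledgerSignTypedMessage does with its reply.
func toRSV(reply []byte) []byte {
	return append(reply[1:], reply[0])
}

func main() {
	// m/44'/60'/0'/0/0; the hardened flag is the top bit of each component.
	path := []uint32{0x8000002C, 0x8000003C, 0x80000000, 0, 0}
	domainHash := make([]byte, 32)  // placeholder domain separator hash
	messageHash := make([]byte, 32) // placeholder struct hash
	payload := buildTypedMessagePayload(path, domainHash, messageHash)
	fmt.Printf("payload length: %d bytes\n", len(payload)) // 1 + 5*4 + 32 + 32 = 85
	fmt.Printf("example reply reorder: %x\n", toRSV(make([]byte, 65)))
}
```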
// ledgerExchange performs a data exchange with the Ledger wallet, sending it a
|
// ledgerExchange performs a data exchange with the Ledger wallet, sending it a
|
||||||
// message and retrieving the response.
|
// message and retrieving the response.
|
||||||
//
|
//
|
||||||
|
@ -185,6 +185,10 @@ func (w *trezorDriver) SignTx(path accounts.DerivationPath, tx *types.Transactio
|
|||||||
return w.trezorSign(path, tx, chainID)
|
return w.trezorSign(path, tx, chainID)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (w *trezorDriver) SignTypedMessage(path accounts.DerivationPath, domainHash []byte, messageHash []byte) ([]byte, error) {
|
||||||
|
return nil, accounts.ErrNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
// trezorDerive sends a derivation request to the Trezor device and returns the
|
// trezorDerive sends a derivation request to the Trezor device and returns the
|
||||||
// Ethereum address located on that path.
|
// Ethereum address located on that path.
|
||||||
func (w *trezorDriver) trezorDerive(derivationPath []uint32) (common.Address, error) {
|
func (w *trezorDriver) trezorDerive(derivationPath []uint32) (common.Address, error) {
|
||||||
@ -255,9 +259,11 @@ func (w *trezorDriver) trezorSign(derivationPath []uint32, tx *types.Transaction
|
|||||||
if chainID == nil {
|
if chainID == nil {
|
||||||
signer = new(types.HomesteadSigner)
|
signer = new(types.HomesteadSigner)
|
||||||
} else {
|
} else {
|
||||||
|
// Trezor backend does not support typed transactions yet.
|
||||||
signer = types.NewEIP155Signer(chainID)
|
signer = types.NewEIP155Signer(chainID)
|
||||||
signature[64] -= byte(chainID.Uint64()*2 + 35)
|
signature[64] -= byte(chainID.Uint64()*2 + 35)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Inject the final signature into the transaction and sanity check the sender
|
// Inject the final signature into the transaction and sanity check the sender
|
||||||
signed, err := tx.WithSignature(signer, signature)
|
signed, err := tx.WithSignature(signer, signature)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
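The signature adjustment above folds the EIP-155 v value reported by the Trezor back into a plain 0/1 recovery id before the transaction is re-signed with the EIP-155 signer. A minimal sketch of that arithmetic, assuming the usual 0 or 1 recovery id and a small chain id:

```go
package main

import "fmt"

// eip155V computes the EIP-155 replay-protected v value from a raw
// recovery id (0 or 1) and a chain id; recoverID inverts it. This mirrors
// the "signature[64] -= byte(chainID*2 + 35)" step in trezorSign above.
func eip155V(recID byte, chainID uint64) uint64 {
	return uint64(recID) + chainID*2 + 35
}

func recoverID(v uint64, chainID uint64) byte {
	return byte(v - (chainID*2 + 35))
}

func main() {
	const chainID = 1 // Ethereum mainnet, used here only as an example
	for _, rec := range []byte{0, 1} {
		v := eip155V(rec, chainID)
		fmt.Printf("recID=%d -> v=%d -> recovered=%d\n", rec, v, recoverID(v, chainID))
	}
}
```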
|
@ -25,7 +25,7 @@ import (
|
|||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
ethereum "github.com/ethereum/go-ethereum"
|
"github.com/ethereum/go-ethereum"
|
||||||
"github.com/ethereum/go-ethereum/accounts"
|
"github.com/ethereum/go-ethereum/accounts"
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
@ -67,6 +67,8 @@ type driver interface {
|
|||||||
// SignTx sends the transaction to the USB device and waits for the user to confirm
|
// SignTx sends the transaction to the USB device and waits for the user to confirm
|
||||||
// or deny the transaction.
|
// or deny the transaction.
|
||||||
SignTx(path accounts.DerivationPath, tx *types.Transaction, chainID *big.Int) (common.Address, *types.Transaction, error)
|
SignTx(path accounts.DerivationPath, tx *types.Transaction, chainID *big.Int) (common.Address, *types.Transaction, error)
|
||||||
|
|
||||||
|
SignTypedMessage(path accounts.DerivationPath, messageHash []byte, domainHash []byte) ([]byte, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
// wallet represents the common functionality shared by all USB hardware
|
// wallet represents the common functionality shared by all USB hardware
|
||||||
@ -368,18 +370,22 @@ func (w *wallet) selfDerive() {
|
|||||||
w.log.Warn("USB wallet nonce retrieval failed", "err", err)
|
w.log.Warn("USB wallet nonce retrieval failed", "err", err)
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
// If the next account is empty, stop self-derivation, but add for the last base path
|
// We've just self-derived a new account, start tracking it locally
|
||||||
|
// unless the account was empty.
|
||||||
|
path := make(accounts.DerivationPath, len(nextPaths[i]))
|
||||||
|
copy(path[:], nextPaths[i][:])
|
||||||
if balance.Sign() == 0 && nonce == 0 {
|
if balance.Sign() == 0 && nonce == 0 {
|
||||||
empty = true
|
empty = true
|
||||||
|
// If it indeed was empty, make a log output for it anyway. In the case
|
||||||
|
// of legacy-ledger, the first account on the legacy-path will
|
||||||
|
// be shown to the user, even if we don't actively track it
|
||||||
if i < len(nextAddrs)-1 {
|
if i < len(nextAddrs)-1 {
|
||||||
|
w.log.Info("Skipping trakcking first account on legacy path, use personal.deriveAccount(<url>,<path>, false) to track",
|
||||||
|
"path", path, "address", nextAddrs[i])
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// We've just self-derived a new account, start tracking it locally
|
|
||||||
path := make(accounts.DerivationPath, len(nextPaths[i]))
|
|
||||||
copy(path[:], nextPaths[i][:])
|
|
||||||
paths = append(paths, path)
|
paths = append(paths, path)
|
||||||
|
|
||||||
account := accounts.Account{
|
account := accounts.Account{
|
||||||
Address: nextAddrs[i],
|
Address: nextAddrs[i],
|
||||||
URL: accounts.URL{Scheme: w.url.Scheme, Path: fmt.Sprintf("%s/%s", w.url.Path, path)},
|
URL: accounts.URL{Scheme: w.url.Scheme, Path: fmt.Sprintf("%s/%s", w.url.Path, path)},
|
||||||
@ -489,7 +495,7 @@ func (w *wallet) Derive(path accounts.DerivationPath, pin bool) (accounts.Accoun
|
|||||||
// to discover non zero accounts and automatically add them to list of tracked
|
// to discover non zero accounts and automatically add them to list of tracked
|
||||||
// accounts.
|
// accounts.
|
||||||
//
|
//
|
||||||
// Note, self derivaton will increment the last component of the specified path
|
// Note, self derivation will increment the last component of the specified path,
|
||||||
// as opposed to descending into a child path, to allow discovering accounts starting
|
// as opposed to descending into a child path, to allow discovering accounts starting
|
||||||
// from non zero components.
|
// from non zero components.
|
||||||
//
|
//
|
||||||
@ -520,7 +526,46 @@ func (w *wallet) signHash(account accounts.Account, hash []byte) ([]byte, error)
|
|||||||
|
|
||||||
// SignData signs keccak256(data). The mimetype parameter describes the type of data being signed
|
// SignData signs keccak256(data). The mimetype parameter describes the type of data being signed
|
||||||
func (w *wallet) SignData(account accounts.Account, mimeType string, data []byte) ([]byte, error) {
|
func (w *wallet) SignData(account accounts.Account, mimeType string, data []byte) ([]byte, error) {
|
||||||
return w.signHash(account, crypto.Keccak256(data))
|
|
||||||
|
// Unless we are doing 712 signing, simply dispatch to signHash
|
||||||
|
if !(mimeType == accounts.MimetypeTypedData && len(data) == 66 && data[0] == 0x19 && data[1] == 0x01) {
|
||||||
|
return w.signHash(account, crypto.Keccak256(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
// dispatch to 712 signing if the mimetype is TypedData and the format matches
|
||||||
|
w.stateLock.RLock() // Comms have own mutex, this is for the state fields
|
||||||
|
defer w.stateLock.RUnlock()
|
||||||
|
|
||||||
|
// If the wallet is closed, abort
|
||||||
|
if w.device == nil {
|
||||||
|
return nil, accounts.ErrWalletClosed
|
||||||
|
}
|
||||||
|
// Make sure the requested account is contained within
|
||||||
|
path, ok := w.paths[account.Address]
|
||||||
|
if !ok {
|
||||||
|
return nil, accounts.ErrUnknownAccount
|
||||||
|
}
|
||||||
|
// All infos gathered and metadata checks out, request signing
|
||||||
|
<-w.commsLock
|
||||||
|
defer func() { w.commsLock <- struct{}{} }()
|
||||||
|
|
||||||
|
// Ensure the device isn't screwed with while user confirmation is pending
|
||||||
|
// TODO(karalabe): remove if hotplug lands on Windows
|
||||||
|
w.hub.commsLock.Lock()
|
||||||
|
w.hub.commsPend++
|
||||||
|
w.hub.commsLock.Unlock()
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
w.hub.commsLock.Lock()
|
||||||
|
w.hub.commsPend--
|
||||||
|
w.hub.commsLock.Unlock()
|
||||||
|
}()
|
||||||
|
// Sign the transaction
|
||||||
|
signature, err := w.driver.SignTypedMessage(path, data[2:34], data[34:66])
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return signature, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
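SignData above only routes a request to the hardware EIP-712 path when the payload is exactly 66 bytes and starts with 0x19 0x01. A hypothetical caller-side sketch of how such a buffer is formed from a 32-byte domain separator and a 32-byte struct hash (the hashes themselves are assumed to be computed elsewhere per EIP-712):

```go
package main

import (
	"errors"
	"fmt"
)

// typedDataPreimage builds the 66-byte buffer that SignData above
// recognises as an EIP-712 request: 0x19 0x01 || domainSeparator || structHash.
func typedDataPreimage(domainSeparator, structHash []byte) ([]byte, error) {
	if len(domainSeparator) != 32 || len(structHash) != 32 {
		return nil, errors.New("domain separator and struct hash must be 32 bytes")
	}
	buf := make([]byte, 0, 66)
	buf = append(buf, 0x19, 0x01)
	buf = append(buf, domainSeparator...)
	buf = append(buf, structHash...)
	return buf, nil
}

func main() {
	domain := make([]byte, 32) // placeholder values
	msg := make([]byte, 32)
	data, err := typedDataPreimage(domain, msg)
	if err != nil {
		panic(err)
	}
	// SignData slices data[2:34] as the domain hash and data[34:66] as the
	// message hash before handing them to the driver.
	fmt.Println(len(data), data[0] == 0x19, data[1] == 0x01)
}
```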
// SignDataWithPassphrase implements accounts.Wallet, attempting to sign the given
|
// SignDataWithPassphrase implements accounts.Wallet, attempting to sign the given
|
||||||
|
73	appveyor.yml
@ -1,40 +1,57 @@
|
|||||||
os: Visual Studio 2015
|
|
||||||
|
|
||||||
# Clone directly into GOPATH.
|
|
||||||
clone_folder: C:\gopath\src\github.com\ethereum\go-ethereum
|
|
||||||
clone_depth: 5
|
clone_depth: 5
|
||||||
version: "{branch}.{build}"
|
version: "{branch}.{build}"
|
||||||
|
|
||||||
|
image:
|
||||||
|
- Ubuntu
|
||||||
|
- Visual Studio 2019
|
||||||
|
|
||||||
environment:
|
environment:
|
||||||
global:
|
|
||||||
GOPATH: C:\gopath
|
|
||||||
CC: gcc.exe
|
|
||||||
matrix:
|
matrix:
|
||||||
- GETH_ARCH: amd64
|
- GETH_ARCH: amd64
|
||||||
MSYS2_ARCH: x86_64
|
GETH_MINGW: 'C:\msys64\mingw64'
|
||||||
MSYS2_BITS: 64
|
|
||||||
MSYSTEM: MINGW64
|
|
||||||
PATH: C:\msys64\mingw64\bin\;C:\Program Files (x86)\NSIS\;%PATH%
|
|
||||||
- GETH_ARCH: 386
|
- GETH_ARCH: 386
|
||||||
MSYS2_ARCH: i686
|
GETH_MINGW: 'C:\msys64\mingw32'
|
||||||
MSYS2_BITS: 32
|
|
||||||
MSYSTEM: MINGW32
|
|
||||||
PATH: C:\msys64\mingw32\bin\;C:\Program Files (x86)\NSIS\;%PATH%
|
|
||||||
|
|
||||||
install:
|
install:
|
||||||
- git submodule update --init
|
- git submodule update --init --depth 1
|
||||||
- rmdir C:\go /s /q
|
|
||||||
- appveyor DownloadFile https://dl.google.com/go/go1.13.4.windows-%GETH_ARCH%.zip
|
|
||||||
- 7z x go1.13.4.windows-%GETH_ARCH%.zip -y -oC:\ > NUL
|
|
||||||
- go version
|
- go version
|
||||||
- gcc --version
|
|
||||||
|
|
||||||
build_script:
|
for:
|
||||||
- go run build\ci.go install
|
# Linux has its own script without -arch and -cc.
|
||||||
|
# The linux builder also runs lint.
|
||||||
|
- matrix:
|
||||||
|
only:
|
||||||
|
- image: Ubuntu
|
||||||
|
build_script:
|
||||||
|
- go run build/ci.go lint
|
||||||
|
- go run build/ci.go install -dlgo
|
||||||
|
test_script:
|
||||||
|
- go run build/ci.go test -dlgo -coverage
|
||||||
|
|
||||||
after_build:
|
# linux/386 is disabled.
|
||||||
- go run build\ci.go archive -type zip -signer WINDOWS_SIGNING_KEY -upload gethstore/builds
|
- matrix:
|
||||||
- go run build\ci.go nsis -signer WINDOWS_SIGNING_KEY -upload gethstore/builds
|
exclude:
|
||||||
|
- image: Ubuntu
|
||||||
|
GETH_ARCH: 386
|
||||||
|
|
||||||
test_script:
|
# Windows builds for amd64 + 386.
|
||||||
- set CGO_ENABLED=1
|
- matrix:
|
||||||
- go run build\ci.go test -coverage
|
only:
|
||||||
|
- image: Visual Studio 2019
|
||||||
|
environment:
|
||||||
|
# We use gcc from MSYS2 because it is the most recent compiler version available on
|
||||||
|
# AppVeyor. Note: gcc.exe only works properly if the corresponding bin/ directory is
|
||||||
|
# contained in PATH.
|
||||||
|
GETH_CC: '%GETH_MINGW%\bin\gcc.exe'
|
||||||
|
PATH: '%GETH_MINGW%\bin;C:\Program Files (x86)\NSIS\;%PATH%'
|
||||||
|
build_script:
|
||||||
|
- 'echo %GETH_ARCH%'
|
||||||
|
- 'echo %GETH_CC%'
|
||||||
|
- '%GETH_CC% --version'
|
||||||
|
- go run build/ci.go install -dlgo -arch %GETH_ARCH% -cc %GETH_CC%
|
||||||
|
after_build:
|
||||||
|
# Upload builds. Note that ci.go makes this a no-op for PR builds.
|
||||||
|
- go run build/ci.go archive -arch %GETH_ARCH% -type zip -signer WINDOWS_SIGNING_KEY -upload gethstore/builds
|
||||||
|
- go run build/ci.go nsis -arch %GETH_ARCH% -signer WINDOWS_SIGNING_KEY -upload gethstore/builds
|
||||||
|
test_script:
|
||||||
|
- go run build/ci.go test -dlgo -arch %GETH_ARCH% -cc %GETH_CC% -coverage
|
||||||
|
37	build/checksums.txt (new file)
@ -0,0 +1,37 @@
|
|||||||
|
# This file contains sha256 checksums of optional build dependencies.
|
||||||
|
|
||||||
|
2255eb3e4e824dd7d5fcdc2e7f84534371c186312e546fb1086a34c17752f431 go1.17.2.src.tar.gz
|
||||||
|
7914497a302a132a465d33f5ee044ce05568bacdb390ab805cb75a3435a23f94 go1.17.2.darwin-amd64.tar.gz
|
||||||
|
ce8771bd3edfb5b28104084b56bbb532eeb47fbb7769c3e664c6223712c30904 go1.17.2.darwin-arm64.tar.gz
|
||||||
|
8cea5b8d1f8e8cbb58069bfed58954c71c5b1aca2f3c857765dae83bf724d0d7 go1.17.2.freebsd-386.tar.gz
|
||||||
|
c96e57218fb03e74d683ad63b1684d44c89d5e5b994f36102b33dce21b58499a go1.17.2.freebsd-amd64.tar.gz
|
||||||
|
8617f2e40d51076983502894181ae639d1d8101bfbc4d7463a2b442f239f5596 go1.17.2.linux-386.tar.gz
|
||||||
|
f242a9db6a0ad1846de7b6d94d507915d14062660616a61ef7c808a76e4f1676 go1.17.2.linux-amd64.tar.gz
|
||||||
|
a5a43c9cdabdb9f371d56951b14290eba8ce2f9b0db48fb5fc657943984fd4fc go1.17.2.linux-arm64.tar.gz
|
||||||
|
04d16105008230a9763005be05606f7eb1c683a3dbf0fbfed4034b23889cb7f2 go1.17.2.linux-armv6l.tar.gz
|
||||||
|
12e2dc7e0ffeebe77083f267ef6705fec1621cdf2ed6489b3af04a13597ed68d go1.17.2.linux-ppc64le.tar.gz
|
||||||
|
c4b2349a8d11350ca038b8c57f3cc58dc0b31284bcbed4f7fca39aeed28b4a51 go1.17.2.linux-s390x.tar.gz
|
||||||
|
8a85257a351996fdf045fe95ed5fdd6917dd48636d562dd11dedf193005a53e0 go1.17.2.windows-386.zip
|
||||||
|
fa6da0b829a66f5fab7e4e312fd6aa1b2d8f045c7ecee83b3d00f6fe5306759a go1.17.2.windows-amd64.zip
|
||||||
|
00575c85dc7a129ba892685a456b27a3f3670f71c8bfde1c5ad151f771d55df7 go1.17.2.windows-arm64.zip
|
||||||
|
|
||||||
|
d4bd25b9814eeaa2134197dd2c7671bb791eae786d42010d9d788af20dee4bfa golangci-lint-1.42.0-darwin-amd64.tar.gz
|
||||||
|
e56859c04a2ad5390c6a497b1acb1cc9329ecb1010260c6faae9b5a4c35b35ea golangci-lint-1.42.0-darwin-arm64.tar.gz
|
||||||
|
14d912a3fa856830339472fc4dc341933adf15f37bdb7130bbbfcf960ecf4809 golangci-lint-1.42.0-freebsd-386.tar.gz
|
||||||
|
337257fccc9baeb5ee1cd7e70c153e9d9f59d3afde46d631659500048afbdf80 golangci-lint-1.42.0-freebsd-amd64.tar.gz
|
||||||
|
6debcc266b629359fdd8eef4f4abb05a621604079d27016265afb5b4593b0eff golangci-lint-1.42.0-freebsd-armv6.tar.gz
|
||||||
|
878f0e190169db2ce9dde8cefbd99adc4fe28b90b68686bbfcfcc2085e6d693e golangci-lint-1.42.0-freebsd-armv7.tar.gz
|
||||||
|
42c78e31faf62b225363eff1b1d2aa74f9dbcb75686c8914aa3e90d6af65cece golangci-lint-1.42.0-linux-386.tar.gz
|
||||||
|
6937f62f8e2329e94822dc11c10b871ace5557ae1fcc4ee2f9980cd6aecbc159 golangci-lint-1.42.0-linux-amd64.tar.gz
|
||||||
|
2cf8d23d96cd854a537b355dab2962b960b88a06b615232599f066afd233f246 golangci-lint-1.42.0-linux-arm64.tar.gz
|
||||||
|
08b003d1ed61367473886defc957af5301066e62338e5d96a319c34dadc4c1d1 golangci-lint-1.42.0-linux-armv6.tar.gz
|
||||||
|
c7c00ec4845e806a1f32685f5b150219e180bd6d6a9d584be8d27f0c41d7a1bf golangci-lint-1.42.0-linux-armv7.tar.gz
|
||||||
|
3650fcf29eb3d8ee326d77791a896b15259eb2d5bf77437dc72e7efe5af6bd40 golangci-lint-1.42.0-linux-mips64.tar.gz
|
||||||
|
f51ae003fdbca4fef78ba73e2eb736a939c8eaa178cd452234213b489da5a420 golangci-lint-1.42.0-linux-mips64le.tar.gz
|
||||||
|
1b0bb7b8b22cc4ea7da44fd5ad5faaf6111d0677e01cc6f961b62a96537de2c6 golangci-lint-1.42.0-linux-ppc64le.tar.gz
|
||||||
|
8cb56927eb75e572450efbe0ff0f9cf3f56dc9faa81d9e8d30d6559fc1d06e6d golangci-lint-1.42.0-linux-riscv64.tar.gz
|
||||||
|
5ac41cd31825a176b21505a371a7b307cd9cdf17df0f35bbb3bf1466f9356ccc golangci-lint-1.42.0-linux-s390x.tar.gz
|
||||||
|
e1cebd2af621ac4b64c20937df92c3819264f2174c92f51e196db1e64ae097e0 golangci-lint-1.42.0-windows-386.zip
|
||||||
|
7e70fcde8e87a17cae0455df07d257ebc86669f3968d568e12727fa24bbe9883 golangci-lint-1.42.0-windows-amd64.zip
|
||||||
|
59da7ce1bda432616bfc28ae663e52c3675adee8d9bf5959fafd657c159576ab golangci-lint-1.42.0-windows-armv6.zip
|
||||||
|
65f62dda937bfcede0326ac77abe947ce1548931e6e13298ca036cb31f224db5 golangci-lint-1.42.0-windows-armv7.zip
|
657	build/ci.go
@ -14,6 +14,7 @@
|
|||||||
// You should have received a copy of the GNU Lesser General Public License
|
// You should have received a copy of the GNU Lesser General Public License
|
||||||
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
|
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
//go:build none
|
||||||
// +build none
|
// +build none
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@ -26,13 +27,12 @@ Available commands are:
|
|||||||
install [ -arch architecture ] [ -cc compiler ] [ packages... ] -- builds packages and executables
|
install [ -arch architecture ] [ -cc compiler ] [ packages... ] -- builds packages and executables
|
||||||
test [ -coverage ] [ packages... ] -- runs the tests
|
test [ -coverage ] [ packages... ] -- runs the tests
|
||||||
lint -- runs certain pre-selected linters
|
lint -- runs certain pre-selected linters
|
||||||
archive [ -arch architecture ] [ -type zip|tar ] [ -signer key-envvar ] [ -upload dest ] -- archives build artifacts
|
archive [ -arch architecture ] [ -type zip|tar ] [ -signer key-envvar ] [ -signify key-envvar ] [ -upload dest ] -- archives build artifacts
|
||||||
importkeys -- imports signing keys from env
|
importkeys -- imports signing keys from env
|
||||||
debsrc [ -signer key-id ] [ -upload dest ] -- creates a debian source package
|
debsrc [ -signer key-id ] [ -upload dest ] -- creates a debian source package
|
||||||
nsis -- creates a Windows NSIS installer
|
nsis -- creates a Windows NSIS installer
|
||||||
aar [ -local ] [ -sign key-id ] [-deploy repo] [ -upload dest ] -- creates an Android archive
|
aar [ -local ] [ -sign key-id ] [-deploy repo] [ -upload dest ] -- creates an Android archive
|
||||||
xcode [ -local ] [ -sign key-id ] [-deploy repo] [ -upload dest ] -- creates an iOS XCode framework
|
xcode [ -local ] [ -sign key-id ] [-deploy repo] [ -upload dest ] -- creates an iOS XCode framework
|
||||||
xgo [ -alltools ] [ options ] -- cross builds according to options
|
|
||||||
purge [ -store blobstore ] [ -days threshold ] -- purges old archives from the blobstore
|
purge [ -store blobstore ] [ -days threshold ] -- purges old archives from the blobstore
|
||||||
|
|
||||||
For all commands, -n prevents execution of external programs (dry run mode).
|
For all commands, -n prevents execution of external programs (dry run mode).
|
||||||
@ -46,19 +46,20 @@ import (
|
|||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
"regexp"
|
||||||
"runtime"
|
"runtime"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
"github.com/cespare/cp"
|
||||||
|
"github.com/ethereum/go-ethereum/crypto/signify"
|
||||||
"github.com/ethereum/go-ethereum/internal/build"
|
"github.com/ethereum/go-ethereum/internal/build"
|
||||||
"github.com/ethereum/go-ethereum/params"
|
"github.com/ethereum/go-ethereum/params"
|
||||||
)
|
)
|
||||||
@ -79,7 +80,6 @@ var (
|
|||||||
executablePath("geth"),
|
executablePath("geth"),
|
||||||
executablePath("puppeth"),
|
executablePath("puppeth"),
|
||||||
executablePath("rlpdump"),
|
executablePath("rlpdump"),
|
||||||
executablePath("wnode"),
|
|
||||||
executablePath("clef"),
|
executablePath("clef"),
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -109,10 +109,6 @@ var (
|
|||||||
BinaryName: "rlpdump",
|
BinaryName: "rlpdump",
|
||||||
Description: "Developer utility tool that prints RLP structures.",
|
Description: "Developer utility tool that prints RLP structures.",
|
||||||
},
|
},
|
||||||
{
|
|
||||||
BinaryName: "wnode",
|
|
||||||
Description: "Ethereum Whisper diagnostic tool",
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
BinaryName: "clef",
|
BinaryName: "clef",
|
||||||
Description: "Ethereum account management tool.",
|
Description: "Ethereum account management tool.",
|
||||||
@ -120,7 +116,6 @@ var (
|
|||||||
}
|
}
|
||||||
|
|
||||||
// A debian package is created for all executables listed here.
|
// A debian package is created for all executables listed here.
|
||||||
|
|
||||||
debEthereum = debPackage{
|
debEthereum = debPackage{
|
||||||
Name: "ethereum",
|
Name: "ethereum",
|
||||||
Version: params.Version,
|
Version: params.Version,
|
||||||
@ -134,23 +129,25 @@ var (
|
|||||||
|
|
||||||
// Distros for which packages are created.
|
// Distros for which packages are created.
|
||||||
// Note: vivid is unsupported because there is no golang-1.6 package for it.
|
// Note: vivid is unsupported because there is no golang-1.6 package for it.
|
||||||
// Note: wily is unsupported because it was officially deprecated on Launchpad.
|
// Note: the following Ubuntu releases have been officially deprecated on Launchpad:
|
||||||
// Note: yakkety is unsupported because it was officially deprecated on Launchpad.
|
// wily, yakkety, zesty, artful, cosmic, disco, eoan, groovy
|
||||||
// Note: zesty is unsupported because it was officially deprecated on Launchpad.
|
|
||||||
// Note: artful is unsupported because it was officially deprecated on Launchpad.
|
|
||||||
// Note: cosmic is unsupported because it was officially deprecated on Launchpad.
|
|
||||||
debDistroGoBoots = map[string]string{
|
debDistroGoBoots = map[string]string{
|
||||||
"trusty": "golang-1.11",
|
"trusty": "golang-1.11",
|
||||||
"xenial": "golang-go",
|
"xenial": "golang-go",
|
||||||
"bionic": "golang-go",
|
"bionic": "golang-go",
|
||||||
"disco": "golang-go",
|
"focal": "golang-go",
|
||||||
"eoan": "golang-go",
|
"hirsute": "golang-go",
|
||||||
}
|
}
|
||||||
|
|
||||||
debGoBootPaths = map[string]string{
|
debGoBootPaths = map[string]string{
|
||||||
"golang-1.11": "/usr/lib/go-1.11",
|
"golang-1.11": "/usr/lib/go-1.11",
|
||||||
"golang-go": "/usr/lib/go",
|
"golang-go": "/usr/lib/go",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This is the version of go that will be downloaded by
|
||||||
|
//
|
||||||
|
// go run ci.go install -dlgo
|
||||||
|
dlgoVersion = "1.17.2"
|
||||||
)
|
)
|
||||||
|
|
||||||
var GOBIN, _ = filepath.Abs(filepath.Join("build", "bin"))
|
var GOBIN, _ = filepath.Abs(filepath.Join("build", "bin"))
|
||||||
@ -180,6 +177,8 @@ func main() {
|
|||||||
doLint(os.Args[2:])
|
doLint(os.Args[2:])
|
||||||
case "archive":
|
case "archive":
|
||||||
doArchive(os.Args[2:])
|
doArchive(os.Args[2:])
|
||||||
|
case "docker":
|
||||||
|
doDocker(os.Args[2:])
|
||||||
case "debsrc":
|
case "debsrc":
|
||||||
doDebianSource(os.Args[2:])
|
doDebianSource(os.Args[2:])
|
||||||
case "nsis":
|
case "nsis":
|
||||||
@ -188,8 +187,6 @@ func main() {
|
|||||||
doAndroidArchive(os.Args[2:])
|
doAndroidArchive(os.Args[2:])
|
||||||
case "xcode":
|
case "xcode":
|
||||||
doXCodeFramework(os.Args[2:])
|
doXCodeFramework(os.Args[2:])
|
||||||
case "xgo":
|
|
||||||
doXgo(os.Args[2:])
|
|
||||||
case "purge":
|
case "purge":
|
||||||
doPurge(os.Args[2:])
|
doPurge(os.Args[2:])
|
||||||
default:
|
default:
|
||||||
@ -201,184 +198,163 @@ func main() {
|
|||||||
|
|
||||||
func doInstall(cmdline []string) {
|
func doInstall(cmdline []string) {
|
||||||
var (
|
var (
|
||||||
|
dlgo = flag.Bool("dlgo", false, "Download Go and build with it")
|
||||||
arch = flag.String("arch", "", "Architecture to cross build for")
|
arch = flag.String("arch", "", "Architecture to cross build for")
|
||||||
cc = flag.String("cc", "", "C compiler to cross build with")
|
cc = flag.String("cc", "", "C compiler to cross build with")
|
||||||
)
|
)
|
||||||
flag.CommandLine.Parse(cmdline)
|
flag.CommandLine.Parse(cmdline)
|
||||||
|
|
||||||
|
// Configure the toolchain.
|
||||||
|
tc := build.GoToolchain{GOARCH: *arch, CC: *cc}
|
||||||
|
if *dlgo {
|
||||||
|
csdb := build.MustLoadChecksums("build/checksums.txt")
|
||||||
|
tc.Root = build.DownloadGo(csdb, dlgoVersion)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure the build.
|
||||||
env := build.Env()
|
env := build.Env()
|
||||||
|
gobuild := tc.Go("build", buildFlags(env)...)
|
||||||
|
|
||||||
// Check Go version. People regularly open issues about compilation
|
// arm64 CI builders are memory-constrained and can't handle concurrent builds,
|
||||||
// failure with outdated Go. This should save them the trouble.
|
// better disable it. This check isn't the best, it should probably
|
||||||
if !strings.Contains(runtime.Version(), "devel") {
|
// check for something in env instead.
|
||||||
// Figure out the minor version number since we can't textually compare (1.10 < 1.9)
|
if env.CI && runtime.GOARCH == "arm64" {
|
||||||
var minor int
|
gobuild.Args = append(gobuild.Args, "-p", "1")
|
||||||
fmt.Sscanf(strings.TrimPrefix(runtime.Version(), "go1."), "%d", &minor)
|
|
||||||
|
|
||||||
if minor < 9 {
|
|
||||||
log.Println("You have Go version", runtime.Version())
|
|
||||||
log.Println("go-ethereum requires at least Go version 1.9 and cannot")
|
|
||||||
log.Println("be compiled with an earlier version. Please upgrade your Go installation.")
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Compile packages given as arguments, or everything if there are no arguments.
|
|
||||||
packages := []string{"./..."}
|
|
||||||
if flag.NArg() > 0 {
|
|
||||||
packages = flag.Args()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if *arch == "" || *arch == runtime.GOARCH {
|
// We use -trimpath to avoid leaking local paths into the built executables.
|
||||||
goinstall := goTool("install", buildFlags(env)...)
|
gobuild.Args = append(gobuild.Args, "-trimpath")
|
||||||
goinstall.Args = append(goinstall.Args, "-v")
|
|
||||||
goinstall.Args = append(goinstall.Args, packages...)
|
|
||||||
build.MustRun(goinstall)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// If we are cross compiling to ARMv5 ARMv6 or ARMv7, clean any previous builds
|
|
||||||
if *arch == "arm" {
|
|
||||||
os.RemoveAll(filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_arm"))
|
|
||||||
for _, path := range filepath.SplitList(build.GOPATH()) {
|
|
||||||
os.RemoveAll(filepath.Join(path, "pkg", runtime.GOOS+"_arm"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Seems we are cross compiling, work around forbidden GOBIN
|
|
||||||
goinstall := goToolArch(*arch, *cc, "install", buildFlags(env)...)
|
|
||||||
goinstall.Args = append(goinstall.Args, "-v")
|
|
||||||
goinstall.Args = append(goinstall.Args, []string{"-buildmode", "archive"}...)
|
|
||||||
goinstall.Args = append(goinstall.Args, packages...)
|
|
||||||
build.MustRun(goinstall)
|
|
||||||
|
|
||||||
if cmds, err := ioutil.ReadDir("cmd"); err == nil {
|
// Show packages during build.
|
||||||
for _, cmd := range cmds {
|
gobuild.Args = append(gobuild.Args, "-v")
|
||||||
pkgs, err := parser.ParseDir(token.NewFileSet(), filepath.Join(".", "cmd", cmd.Name()), nil, parser.PackageClauseOnly)
|
|
||||||
if err != nil {
|
// Now we choose what we're even building.
|
||||||
log.Fatal(err)
|
// Default: collect all 'main' packages in cmd/ and build those.
|
||||||
}
|
packages := flag.Args()
|
||||||
for name := range pkgs {
|
if len(packages) == 0 {
|
||||||
if name == "main" {
|
packages = build.FindMainPackages("./cmd")
|
||||||
gobuild := goToolArch(*arch, *cc, "build", buildFlags(env)...)
|
}
|
||||||
gobuild.Args = append(gobuild.Args, "-v")
|
|
||||||
gobuild.Args = append(gobuild.Args, []string{"-o", executablePath(cmd.Name())}...)
|
// Do the build!
|
||||||
gobuild.Args = append(gobuild.Args, "."+string(filepath.Separator)+filepath.Join("cmd", cmd.Name()))
|
for _, pkg := range packages {
|
||||||
build.MustRun(gobuild)
|
args := make([]string, len(gobuild.Args))
|
||||||
break
|
copy(args, gobuild.Args)
|
||||||
}
|
args = append(args, "-o", executablePath(path.Base(pkg)))
|
||||||
}
|
args = append(args, pkg)
|
||||||
}
|
build.MustRun(&exec.Cmd{Path: gobuild.Path, Args: args, Env: gobuild.Env})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
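The rewritten doInstall collects every main package under cmd/ and issues one go build per binary instead of a bulk go install. A rough standalone approximation of that pattern, using go/parser as the old code did; findMainPackages is an illustrative helper, not go-ethereum's build.FindMainPackages:

```go
package main

import (
	"go/parser"
	"go/token"
	"log"
	"os"
	"os/exec"
	"path/filepath"
)

// findMainPackages walks the immediate subdirectories of dir and keeps
// those whose package clause is "main".
func findMainPackages(dir string) []string {
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Fatal(err)
	}
	var pkgs []string
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		parsed, err := parser.ParseDir(token.NewFileSet(), filepath.Join(dir, entry.Name()), nil, parser.PackageClauseOnly)
		if err != nil {
			log.Fatal(err)
		}
		if _, ok := parsed["main"]; ok {
			pkgs = append(pkgs, "./"+filepath.Join(dir, entry.Name()))
		}
	}
	return pkgs
}

func main() {
	// One "go build" invocation per command, mirroring the per-package loop
	// in doInstall (base args copied, then -o and the package appended).
	for _, pkg := range findMainPackages("cmd") {
		out := filepath.Join("build", "bin", filepath.Base(pkg))
		cmd := exec.Command("go", "build", "-trimpath", "-v", "-o", out, pkg)
		cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
		if err := cmd.Run(); err != nil {
			log.Fatal(err)
		}
	}
}
```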
|
// buildFlags returns the go tool flags for building.
|
||||||
func buildFlags(env build.Environment) (flags []string) {
|
func buildFlags(env build.Environment) (flags []string) {
|
||||||
var ld []string
|
var ld []string
|
||||||
if env.Commit != "" {
|
if env.Commit != "" {
|
||||||
ld = append(ld, "-X", "main.gitCommit="+env.Commit)
|
ld = append(ld, "-X", "main.gitCommit="+env.Commit)
|
||||||
ld = append(ld, "-X", "main.gitDate="+env.Date)
|
ld = append(ld, "-X", "main.gitDate="+env.Date)
|
||||||
}
|
}
|
||||||
|
// Strip DWARF on darwin. This used to be required for certain things,
|
||||||
|
// and there is no downside to this, so we just keep doing it.
|
||||||
if runtime.GOOS == "darwin" {
|
if runtime.GOOS == "darwin" {
|
||||||
ld = append(ld, "-s")
|
ld = append(ld, "-s")
|
||||||
}
|
}
|
||||||
|
// Enforce the stacksize to 8M, which is the case on most platforms apart from
|
||||||
|
// alpine Linux.
|
||||||
|
if runtime.GOOS == "linux" {
|
||||||
|
ld = append(ld, "-extldflags", "-Wl,-z,stack-size=0x800000")
|
||||||
|
}
|
||||||
if len(ld) > 0 {
|
if len(ld) > 0 {
|
||||||
flags = append(flags, "-ldflags", strings.Join(ld, " "))
|
flags = append(flags, "-ldflags", strings.Join(ld, " "))
|
||||||
}
|
}
|
||||||
return flags
|
return flags
|
||||||
}
|
}
|
||||||
|
|
||||||
func goTool(subcmd string, args ...string) *exec.Cmd {
|
|
||||||
return goToolArch(runtime.GOARCH, os.Getenv("CC"), subcmd, args...)
|
|
||||||
}
|
|
||||||
|
|
||||||
func goToolArch(arch string, cc string, subcmd string, args ...string) *exec.Cmd {
|
|
||||||
cmd := build.GoTool(subcmd, args...)
|
|
||||||
cmd.Env = []string{"GOPATH=" + build.GOPATH()}
|
|
||||||
if arch == "" || arch == runtime.GOARCH {
|
|
||||||
cmd.Env = append(cmd.Env, "GOBIN="+GOBIN)
|
|
||||||
} else {
|
|
||||||
cmd.Env = append(cmd.Env, "CGO_ENABLED=1")
|
|
||||||
cmd.Env = append(cmd.Env, "GOARCH="+arch)
|
|
||||||
}
|
|
||||||
if cc != "" {
|
|
||||||
cmd.Env = append(cmd.Env, "CC="+cc)
|
|
||||||
}
|
|
||||||
for _, e := range os.Environ() {
|
|
||||||
if strings.HasPrefix(e, "GOPATH=") || strings.HasPrefix(e, "GOBIN=") {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
cmd.Env = append(cmd.Env, e)
|
|
||||||
}
|
|
||||||
return cmd
|
|
||||||
}
|
|
||||||
|
|
||||||
// Running The Tests
|
// Running The Tests
|
||||||
//
|
//
|
||||||
// "tests" also includes static analysis tools such as vet.
|
// "tests" also includes static analysis tools such as vet.
|
||||||
|
|
||||||
func doTest(cmdline []string) {
|
func doTest(cmdline []string) {
|
||||||
coverage := flag.Bool("coverage", false, "Whether to record code coverage")
|
var (
|
||||||
|
dlgo = flag.Bool("dlgo", false, "Download Go and build with it")
|
||||||
|
arch = flag.String("arch", "", "Run tests for given architecture")
|
||||||
|
cc = flag.String("cc", "", "Sets C compiler binary")
|
||||||
|
coverage = flag.Bool("coverage", false, "Whether to record code coverage")
|
||||||
|
verbose = flag.Bool("v", false, "Whether to log verbosely")
|
||||||
|
race = flag.Bool("race", false, "Execute the race detector")
|
||||||
|
)
|
||||||
flag.CommandLine.Parse(cmdline)
|
flag.CommandLine.Parse(cmdline)
|
||||||
env := build.Env()
|
|
||||||
|
// Configure the toolchain.
|
||||||
|
tc := build.GoToolchain{GOARCH: *arch, CC: *cc}
|
||||||
|
if *dlgo {
|
||||||
|
csdb := build.MustLoadChecksums("build/checksums.txt")
|
||||||
|
tc.Root = build.DownloadGo(csdb, dlgoVersion)
|
||||||
|
}
|
||||||
|
gotest := tc.Go("test")
|
||||||
|
|
||||||
|
// Test a single package at a time. CI builders are slow
|
||||||
|
// and some tests run into timeouts under load.
|
||||||
|
gotest.Args = append(gotest.Args, "-p", "1")
|
||||||
|
if *coverage {
|
||||||
|
gotest.Args = append(gotest.Args, "-covermode=atomic", "-cover")
|
||||||
|
}
|
||||||
|
if *verbose {
|
||||||
|
gotest.Args = append(gotest.Args, "-v")
|
||||||
|
}
|
||||||
|
if *race {
|
||||||
|
gotest.Args = append(gotest.Args, "-race")
|
||||||
|
}
|
||||||
|
|
||||||
packages := []string{"./..."}
|
packages := []string{"./..."}
|
||||||
if len(flag.CommandLine.Args()) > 0 {
|
if len(flag.CommandLine.Args()) > 0 {
|
||||||
packages = flag.CommandLine.Args()
|
packages = flag.CommandLine.Args()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Run the actual tests.
|
|
||||||
// Test a single package at a time. CI builders are slow
|
|
||||||
// and some tests run into timeouts under load.
|
|
||||||
gotest := goTool("test", buildFlags(env)...)
|
|
||||||
gotest.Args = append(gotest.Args, "-p", "1", "-timeout", "5m", "--short")
|
|
||||||
if *coverage {
|
|
||||||
gotest.Args = append(gotest.Args, "-covermode=atomic", "-cover")
|
|
||||||
}
|
|
||||||
|
|
||||||
gotest.Args = append(gotest.Args, packages...)
|
gotest.Args = append(gotest.Args, packages...)
|
||||||
build.MustRun(gotest)
|
build.MustRun(gotest)
|
||||||
}
|
}
|
||||||
|
|
||||||
// runs gometalinter on requested packages
|
// doLint runs golangci-lint on requested packages.
|
||||||
func doLint(cmdline []string) {
|
func doLint(cmdline []string) {
|
||||||
|
var (
|
||||||
|
cachedir = flag.String("cachedir", "./build/cache", "directory for caching golangci-lint binary.")
|
||||||
|
)
|
||||||
flag.CommandLine.Parse(cmdline)
|
flag.CommandLine.Parse(cmdline)
|
||||||
|
|
||||||
packages := []string{"./..."}
|
packages := []string{"./..."}
|
||||||
if len(flag.CommandLine.Args()) > 0 {
|
if len(flag.CommandLine.Args()) > 0 {
|
||||||
packages = flag.CommandLine.Args()
|
packages = flag.CommandLine.Args()
|
||||||
}
|
}
|
||||||
// Get metalinter and install all supported linters
|
|
||||||
build.MustRun(goTool("get", "gopkg.in/alecthomas/gometalinter.v2"))
|
|
||||||
build.MustRunCommand(filepath.Join(GOBIN, "gometalinter.v2"), "--install")
|
|
||||||
|
|
||||||
// Run fast linters batched together
|
linter := downloadLinter(*cachedir)
|
||||||
configs := []string{
|
lflags := []string{"run", "--config", ".golangci.yml"}
|
||||||
"--vendor",
|
build.MustRunCommand(linter, append(lflags, packages...)...)
|
||||||
"--tests",
|
fmt.Println("You have achieved perfection.")
|
||||||
"--deadline=2m",
|
}
|
||||||
"--disable-all",
|
|
||||||
"--enable=goimports",
|
|
||||||
"--enable=varcheck",
|
|
||||||
"--enable=vet",
|
|
||||||
"--enable=gofmt",
|
|
||||||
"--enable=misspell",
|
|
||||||
"--enable=goconst",
|
|
||||||
"--min-occurrences=6", // for goconst
|
|
||||||
}
|
|
||||||
build.MustRunCommand(filepath.Join(GOBIN, "gometalinter.v2"), append(configs, packages...)...)
|
|
||||||
|
|
||||||
// Run slow linters one by one
|
// downloadLinter downloads and unpacks golangci-lint.
|
||||||
for _, linter := range []string{"unconvert", "gosimple"} {
|
func downloadLinter(cachedir string) string {
|
||||||
configs = []string{"--vendor", "--tests", "--deadline=10m", "--disable-all", "--enable=" + linter}
|
const version = "1.42.0"
|
||||||
build.MustRunCommand(filepath.Join(GOBIN, "gometalinter.v2"), append(configs, packages...)...)
|
|
||||||
|
csdb := build.MustLoadChecksums("build/checksums.txt")
|
||||||
|
base := fmt.Sprintf("golangci-lint-%s-%s-%s", version, runtime.GOOS, runtime.GOARCH)
|
||||||
|
url := fmt.Sprintf("https://github.com/golangci/golangci-lint/releases/download/v%s/%s.tar.gz", version, base)
|
||||||
|
archivePath := filepath.Join(cachedir, base+".tar.gz")
|
||||||
|
if err := csdb.DownloadFile(url, archivePath); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
|
if err := build.ExtractArchive(archivePath, cachedir); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
return filepath.Join(cachedir, base, "golangci-lint")
|
||||||
}
|
}
|
||||||
|
|
||||||
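The linter (and the -dlgo toolchain) is fetched from a URL derived from version, GOOS and GOARCH, then verified against build/checksums.txt. The ChecksumDB internals are not shown here, so the following is only a standalone sketch of such a verification step, assuming the "<sha256>  <filename>" line format visible in checksums.txt above:

```go
package main

import (
	"bufio"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"strings"
)

// verifyChecksum checks a downloaded file against a checksums.txt-style
// database whose lines look like "<sha256>  <filename>". This is not the
// ChecksumDB implementation used by ci.go, only an illustration.
func verifyChecksum(checksumFile, target string) error {
	f, err := os.Open(target)
	if err != nil {
		return err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return err
	}
	got := hex.EncodeToString(h.Sum(nil))

	db, err := os.Open(checksumFile)
	if err != nil {
		return err
	}
	defer db.Close()

	scanner := bufio.NewScanner(db)
	for scanner.Scan() {
		fields := strings.Fields(scanner.Text())
		if len(fields) == 2 && fields[1] == target {
			if fields[0] == got {
				return nil
			}
			return fmt.Errorf("checksum mismatch for %s: have %s, want %s", target, got, fields[0])
		}
	}
	return fmt.Errorf("no checksum entry for %s", target)
}

func main() {
	if err := verifyChecksum("build/checksums.txt", "golangci-lint-1.42.0-linux-amd64.tar.gz"); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("checksum OK")
}
```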
// Release Packaging
|
// Release Packaging
|
||||||
func doArchive(cmdline []string) {
|
func doArchive(cmdline []string) {
|
||||||
var (
|
var (
|
||||||
arch = flag.String("arch", runtime.GOARCH, "Architecture cross packaging")
|
arch = flag.String("arch", runtime.GOARCH, "Architecture cross packaging")
|
||||||
atype = flag.String("type", "zip", "Type of archive to write (zip|tar)")
|
atype = flag.String("type", "zip", "Type of archive to write (zip|tar)")
|
||||||
signer = flag.String("signer", "", `Environment variable holding the signing key (e.g. LINUX_SIGNING_KEY)`)
|
signer = flag.String("signer", "", `Environment variable holding the signing key (e.g. LINUX_SIGNING_KEY)`)
|
||||||
upload = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
|
signify = flag.String("signify", "", `Environment variable holding the signify key (e.g. LINUX_SIGNIFY_KEY)`)
|
||||||
ext string
|
upload = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
|
||||||
|
ext string
|
||||||
)
|
)
|
||||||
flag.CommandLine.Parse(cmdline)
|
flag.CommandLine.Parse(cmdline)
|
||||||
switch *atype {
|
switch *atype {
|
||||||
@ -391,8 +367,7 @@ func doArchive(cmdline []string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
env = build.Env()
|
env = build.Env()
|
||||||
|
|
||||||
basegeth = archiveBasename(*arch, params.ArchiveVersion(env.Commit))
|
basegeth = archiveBasename(*arch, params.ArchiveVersion(env.Commit))
|
||||||
geth = "geth-" + basegeth + ext
|
geth = "geth-" + basegeth + ext
|
||||||
alltools = "geth-alltools-" + basegeth + ext
|
alltools = "geth-alltools-" + basegeth + ext
|
||||||
@ -405,7 +380,7 @@ func doArchive(cmdline []string) {
|
|||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
for _, archive := range []string{geth, alltools} {
|
for _, archive := range []string{geth, alltools} {
|
||||||
if err := archiveUpload(archive, *upload, *signer); err != nil {
|
if err := archiveUpload(archive, *upload, *signer, *signify); err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -425,7 +400,7 @@ func archiveBasename(arch string, archiveVersion string) string {
|
|||||||
return platform + "-" + archiveVersion
|
return platform + "-" + archiveVersion
|
||||||
}
|
}
|
||||||
|
|
||||||
func archiveUpload(archive string, blobstore string, signer string) error {
|
func archiveUpload(archive string, blobstore string, signer string, signifyVar string) error {
|
||||||
// If signing was requested, generate the signature files
|
// If signing was requested, generate the signature files
|
||||||
if signer != "" {
|
if signer != "" {
|
||||||
key := getenvBase64(signer)
|
key := getenvBase64(signer)
|
||||||
@ -433,6 +408,14 @@ func archiveUpload(archive string, blobstore string, signer string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if signifyVar != "" {
|
||||||
|
key := os.Getenv(signifyVar)
|
||||||
|
untrustedComment := "verify with geth-release.pub"
|
||||||
|
trustedComment := fmt.Sprintf("%s (%s)", archive, time.Now().UTC().Format(time.RFC1123))
|
||||||
|
if err := signify.SignFile(archive, archive+".sig", key, untrustedComment, trustedComment); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
// If uploading to Azure was requested, push the archive possibly with its signature
|
// If uploading to Azure was requested, push the archive possibly with its signature
|
||||||
if blobstore != "" {
|
if blobstore != "" {
|
||||||
auth := build.AzureBlobstoreConfig{
|
auth := build.AzureBlobstoreConfig{
|
||||||
@ -448,6 +431,11 @@ func archiveUpload(archive string, blobstore string, signer string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if signifyVar != "" {
|
||||||
|
if err := build.AzureBlobstoreUpload(archive+".sig", filepath.Base(archive+".sig"), auth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
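For completeness, a hypothetical standalone use of the same signify call archiveUpload makes above; the key is read from an environment variable (LINUX_SIGNIFY_KEY, matching the flag's example) and the detached signature is written next to the archive:

```go
package main

import (
	"fmt"
	"log"
	"os"
	"time"

	"github.com/ethereum/go-ethereum/crypto/signify"
)

func main() {
	// Example artifact name; in ci.go the archive name is derived from the
	// build environment rather than hard-coded.
	archive := "geth-linux-amd64-1.10.12.tar.gz"
	key := os.Getenv("LINUX_SIGNIFY_KEY")
	if key == "" {
		log.Fatal("LINUX_SIGNIFY_KEY not set")
	}
	untrustedComment := "verify with geth-release.pub"
	trustedComment := fmt.Sprintf("%s (%s)", archive, time.Now().UTC().Format(time.RFC1123))
	if err := signify.SignFile(archive, archive+".sig", key, untrustedComment, trustedComment); err != nil {
		log.Fatal(err)
	}
	fmt.Println("wrote", archive+".sig")
}
```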
@ -455,34 +443,199 @@ func archiveUpload(archive string, blobstore string, signer string) error {
|
|||||||
// skips archiving for some build configurations.
|
// skips archiving for some build configurations.
|
||||||
func maybeSkipArchive(env build.Environment) {
|
func maybeSkipArchive(env build.Environment) {
|
||||||
if env.IsPullRequest {
|
if env.IsPullRequest {
|
||||||
log.Printf("skipping because this is a PR build")
|
log.Printf("skipping archive creation because this is a PR build")
|
||||||
os.Exit(0)
|
os.Exit(0)
|
||||||
}
|
}
|
||||||
if env.IsCronJob {
|
if env.IsCronJob {
|
||||||
log.Printf("skipping because this is a cron job")
|
log.Printf("skipping archive creation because this is a cron job")
|
||||||
os.Exit(0)
|
os.Exit(0)
|
||||||
}
|
}
|
||||||
if env.Branch != "master" && !strings.HasPrefix(env.Tag, "v1.") {
|
if env.Branch != "master" && !strings.HasPrefix(env.Tag, "v1.") {
|
||||||
log.Printf("skipping because branch %q, tag %q is not on the whitelist", env.Branch, env.Tag)
|
log.Printf("skipping archive creation because branch %q, tag %q is not on the inclusion list", env.Branch, env.Tag)
|
||||||
os.Exit(0)
|
os.Exit(0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Builds the docker images and optionally uploads them to Docker Hub.
|
||||||
|
func doDocker(cmdline []string) {
|
||||||
|
var (
|
||||||
|
image = flag.Bool("image", false, `Whether to build and push an arch specific docker image`)
|
||||||
|
manifest = flag.String("manifest", "", `Push a multi-arch docker image for the specified architectures (usually "amd64,arm64")`)
|
||||||
|
upload = flag.String("upload", "", `Where to upload the docker image (usually "ethereum/client-go")`)
|
||||||
|
)
|
||||||
|
flag.CommandLine.Parse(cmdline)
|
||||||
|
|
||||||
|
// Skip building and pushing docker images for PR builds
|
||||||
|
env := build.Env()
|
||||||
|
maybeSkipArchive(env)
|
||||||
|
|
||||||
|
// Retrieve the upload credentials and authenticate
|
||||||
|
user := getenvBase64("DOCKER_HUB_USERNAME")
|
||||||
|
pass := getenvBase64("DOCKER_HUB_PASSWORD")
|
||||||
|
|
||||||
|
if len(user) > 0 && len(pass) > 0 {
|
||||||
|
auther := exec.Command("docker", "login", "-u", string(user), "--password-stdin")
|
||||||
|
auther.Stdin = bytes.NewReader(pass)
|
||||||
|
build.MustRun(auther)
|
||||||
|
}
|
||||||
|
// Retrieve the version infos to build and push to the following paths:
|
||||||
|
// - ethereum/client-go:latest - Pushes to the master branch, Geth only
|
||||||
|
// - ethereum/client-go:stable - Version tag publish on GitHub, Geth only
|
||||||
|
// - ethereum/client-go:alltools-latest - Pushes to the master branch, Geth & tools
|
||||||
|
// - ethereum/client-go:alltools-stable - Version tag publish on GitHub, Geth & tools
|
||||||
|
// - ethereum/client-go:release-<major>.<minor> - Version tag publish on GitHub, Geth only
|
||||||
|
// - ethereum/client-go:alltools-release-<major>.<minor> - Version tag publish on GitHub, Geth & tools
|
||||||
|
// - ethereum/client-go:v<major>.<minor>.<patch> - Version tag publish on GitHub, Geth only
|
||||||
|
// - ethereum/client-go:alltools-v<major>.<minor>.<patch> - Version tag publish on GitHub, Geth & tools
|
||||||
|
var tags []string
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case env.Branch == "master":
|
||||||
|
tags = []string{"latest"}
|
||||||
|
case strings.HasPrefix(env.Tag, "v1."):
|
||||||
|
tags = []string{"stable", fmt.Sprintf("release-1.%d", params.VersionMinor), "v" + params.Version}
|
||||||
|
}
|
||||||
|
// If architecture specific image builds are requested, build and push them
|
||||||
|
if *image {
|
||||||
|
build.MustRunCommand("docker", "build", "--build-arg", "COMMIT="+env.Commit, "--build-arg", "VERSION="+params.VersionWithMeta, "--build-arg", "BUILDNUM="+env.Buildnum, "--tag", fmt.Sprintf("%s:TAG", *upload), ".")
|
||||||
|
build.MustRunCommand("docker", "build", "--build-arg", "COMMIT="+env.Commit, "--build-arg", "VERSION="+params.VersionWithMeta, "--build-arg", "BUILDNUM="+env.Buildnum, "--tag", fmt.Sprintf("%s:alltools-TAG", *upload), "-f", "Dockerfile.alltools", ".")
|
||||||
|
|
||||||
|
// Tag and upload the images to Docker Hub
|
||||||
|
for _, tag := range tags {
|
||||||
|
gethImage := fmt.Sprintf("%s:%s-%s", *upload, tag, runtime.GOARCH)
|
||||||
|
toolImage := fmt.Sprintf("%s:alltools-%s-%s", *upload, tag, runtime.GOARCH)
|
||||||
|
|
||||||
|
// If the image already exists (non version tag), check the build
|
||||||
|
// number to prevent overwriting a newer commit if concurrent builds
|
||||||
|
// are running. This is still a tiny bit racy if two publishes are
|
||||||
|
// done at the same time, but that's extremely unlikely even on the
|
||||||
|
// master branch.
|
||||||
|
for _, img := range []string{gethImage, toolImage} {
|
||||||
|
if exec.Command("docker", "pull", img).Run() != nil {
|
||||||
|
continue // Generally the only failure is a missing image, which is good
|
||||||
|
}
|
||||||
|
buildnum, err := exec.Command("docker", "inspect", "--format", "{{index .Config.Labels \"buildnum\"}}", img).CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to inspect container: %v\nOutput: %s", err, string(buildnum))
|
||||||
|
}
|
||||||
|
buildnum = bytes.TrimSpace(buildnum)
|
||||||
|
|
||||||
|
if len(buildnum) > 0 && len(env.Buildnum) > 0 {
|
||||||
|
oldnum, err := strconv.Atoi(string(buildnum))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to parse old image build number: %v", err)
|
||||||
|
}
|
||||||
|
newnum, err := strconv.Atoi(env.Buildnum)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to parse current build number: %v", err)
|
||||||
|
}
|
||||||
|
if oldnum > newnum {
|
||||||
|
log.Fatalf("Current build number %d not newer than existing %d", newnum, oldnum)
|
||||||
|
} else {
|
||||||
|
log.Printf("Updating %s from build %d to %d", img, oldnum, newnum)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
build.MustRunCommand("docker", "image", "tag", fmt.Sprintf("%s:TAG", *upload), gethImage)
|
||||||
|
build.MustRunCommand("docker", "image", "tag", fmt.Sprintf("%s:alltools-TAG", *upload), toolImage)
|
||||||
|
build.MustRunCommand("docker", "push", gethImage)
|
||||||
|
build.MustRunCommand("docker", "push", toolImage)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If multi-arch image manifest push is requested, assemble it
|
||||||
|
if len(*manifest) != 0 {
|
||||||
|
// Since different architectures are pushed by different builders, wait
|
||||||
|
// until all required images are updated.
|
||||||
|
var mismatch bool
|
||||||
|
for i := 0; i < 2; i++ { // 2 attempts, second is race check
|
||||||
|
mismatch = false // hope there's no mismatch now
|
||||||
|
|
||||||
|
for _, tag := range tags {
|
||||||
|
for _, arch := range strings.Split(*manifest, ",") {
|
||||||
|
gethImage := fmt.Sprintf("%s:%s-%s", *upload, tag, arch)
|
||||||
|
toolImage := fmt.Sprintf("%s:alltools-%s-%s", *upload, tag, arch)
|
||||||
|
|
||||||
|
for _, img := range []string{gethImage, toolImage} {
|
||||||
|
if out, err := exec.Command("docker", "pull", img).CombinedOutput(); err != nil {
|
||||||
|
log.Printf("Required image %s unavailable: %v\nOutput: %s", img, err, out)
|
||||||
|
mismatch = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
buildnum, err := exec.Command("docker", "inspect", "--format", "{{index .Config.Labels \"buildnum\"}}", img).CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Failed to inspect container: %v\nOutput: %s", err, string(buildnum))
|
||||||
|
}
|
||||||
|
buildnum = bytes.TrimSpace(buildnum)
|
||||||
|
|
||||||
|
if string(buildnum) != env.Buildnum {
|
||||||
|
log.Printf("Build number mismatch on %s: want %s, have %s", img, env.Buildnum, buildnum)
|
||||||
|
mismatch = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if mismatch {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if mismatch {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if mismatch {
|
||||||
|
// Build numbers mismatching, retry in a short time to
|
||||||
|
// avoid concurrent failures in both publisher images. If
|
||||||
|
// however the retry failed too, it means the concurrent
|
||||||
|
// builder is still crunching, let that do the publish.
|
||||||
|
if i == 0 {
|
||||||
|
time.Sleep(30 * time.Second)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if mismatch {
|
||||||
|
log.Println("Relinquishing publish to other builder")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Assemble and push the Geth manifest image
|
||||||
|
for _, tag := range tags {
|
||||||
|
gethImage := fmt.Sprintf("%s:%s", *upload, tag)
|
||||||
|
|
||||||
|
var gethSubImages []string
|
||||||
|
for _, arch := range strings.Split(*manifest, ",") {
|
||||||
|
gethSubImages = append(gethSubImages, gethImage+"-"+arch)
|
||||||
|
}
|
||||||
|
build.MustRunCommand("docker", append([]string{"manifest", "create", gethImage}, gethSubImages...)...)
|
||||||
|
build.MustRunCommand("docker", "manifest", "push", gethImage)
|
||||||
|
}
|
||||||
|
// Assemble and push the alltools manifest image
|
||||||
|
for _, tag := range tags {
|
||||||
|
toolImage := fmt.Sprintf("%s:alltools-%s", *upload, tag)
|
||||||
|
|
||||||
|
var toolSubImages []string
|
||||||
|
for _, arch := range strings.Split(*manifest, ",") {
|
||||||
|
toolSubImages = append(toolSubImages, toolImage+"-"+arch)
|
||||||
|
}
|
||||||
|
build.MustRunCommand("docker", append([]string{"manifest", "create", toolImage}, toolSubImages...)...)
|
||||||
|
build.MustRunCommand("docker", "manifest", "push", toolImage)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
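doDocker derives per-architecture image names from the upload repository, the release tag and GOARCH, and later stitches them into multi-arch manifests. A tiny sketch that expands that naming scheme for an example tag set; the repository and tag values are illustrative only:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Example inputs mirroring the fmt.Sprintf patterns in doDocker above.
	upload := "ethereum/client-go"
	tags := []string{"stable", "release-1.10", "v1.10.12"}
	manifest := "amd64,arm64"

	for _, tag := range tags {
		var gethSubImages, toolSubImages []string
		for _, arch := range strings.Split(manifest, ",") {
			gethSubImages = append(gethSubImages, fmt.Sprintf("%s:%s-%s", upload, tag, arch))
			toolSubImages = append(toolSubImages, fmt.Sprintf("%s:alltools-%s-%s", upload, tag, arch))
		}
		// The multi-arch manifests are then assembled roughly as:
		//   docker manifest create <repo>:<tag> <sub images...>
		//   docker manifest push   <repo>:<tag>
		fmt.Println(fmt.Sprintf("%s:%s", upload, tag), "<-", strings.Join(gethSubImages, " "))
		fmt.Println(fmt.Sprintf("%s:alltools-%s", upload, tag), "<-", strings.Join(toolSubImages, " "))
	}
}
```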
// Debian Packaging
func doDebianSource(cmdline []string) {
	var (
		cachedir = flag.String("cachedir", "./build/cache", `Filesystem path to cache the downloaded Go bundles at`)
		signer   = flag.String("signer", "", `Signing key name, also used as package author`)
		upload   = flag.String("upload", "", `Where to upload the source package (usually "ethereum/ethereum")`)
		sshUser  = flag.String("sftp-user", "", `Username for SFTP upload (usually "geth-ci")`)
		workdir  = flag.String("workdir", "", `Output directory for packages (uses temp dir if unset)`)
		now      = time.Now()
	)
	flag.CommandLine.Parse(cmdline)
	*workdir = makeWorkdir(*workdir)
	env := build.Env()
	tc := new(build.GoToolchain)
	maybeSkipArchive(env)

	// Import the signing key.
@ -491,45 +644,72 @@ func doDebianSource(cmdline []string) {
		gpg.Stdin = bytes.NewReader(key)
		build.MustRun(gpg)
	}
	// Download and verify the Go source package.
	gobundle := downloadGoSources(*cachedir)

	// Download all the dependencies needed to build the sources and run the ci script
	srcdepfetch := tc.Go("mod", "download")
	srcdepfetch.Env = append(srcdepfetch.Env, "GOPATH="+filepath.Join(*workdir, "modgopath"))
	build.MustRun(srcdepfetch)

	cidepfetch := tc.Go("run", "./build/ci.go")
	cidepfetch.Env = append(cidepfetch.Env, "GOPATH="+filepath.Join(*workdir, "modgopath"))
	cidepfetch.Run() // Command fails, don't care, we only need the deps to start it

	// Create Debian packages and upload them.
	for _, pkg := range debPackages {
		for distro, goboot := range debDistroGoBoots {
			// Prepare the debian package with the go-ethereum sources.
			meta := newDebMetadata(distro, goboot, *signer, env, now, pkg.Name, pkg.Version, pkg.Executables)
			pkgdir := stageDebianSource(*workdir, meta)

			// Add Go source code
			if err := build.ExtractArchive(gobundle, pkgdir); err != nil {
				log.Fatalf("Failed to extract Go sources: %v", err)
			}
			if err := os.Rename(filepath.Join(pkgdir, "go"), filepath.Join(pkgdir, ".go")); err != nil {
				log.Fatalf("Failed to rename Go source folder: %v", err)
			}
			// Add all dependency modules in compressed form
			os.MkdirAll(filepath.Join(pkgdir, ".mod", "cache"), 0755)
			if err := cp.CopyAll(filepath.Join(pkgdir, ".mod", "cache", "download"), filepath.Join(*workdir, "modgopath", "pkg", "mod", "cache", "download")); err != nil {
				log.Fatalf("Failed to copy Go module dependencies: %v", err)
			}
			// Run the packaging and upload to the PPA
			debuild := exec.Command("debuild", "-S", "-sa", "-us", "-uc", "-d", "-Zxz", "-nc")
			debuild.Dir = pkgdir
			build.MustRun(debuild)

			var (
				basename  = fmt.Sprintf("%s_%s", meta.Name(), meta.VersionString())
				source    = filepath.Join(*workdir, basename+".tar.xz")
				dsc       = filepath.Join(*workdir, basename+".dsc")
				changes   = filepath.Join(*workdir, basename+"_source.changes")
				buildinfo = filepath.Join(*workdir, basename+"_source.buildinfo")
			)
			if *signer != "" {
				build.MustRunCommand("debsign", changes)
			}
			if *upload != "" {
				ppaUpload(*workdir, *upload, *sshUser, []string{source, dsc, changes, buildinfo})
			}
		}
	}
}

// downloadGoSources downloads the Go source tarball.
func downloadGoSources(cachedir string) string {
	csdb := build.MustLoadChecksums("build/checksums.txt")
	file := fmt.Sprintf("go%s.src.tar.gz", dlgoVersion)
	url := "https://dl.google.com/go/" + file
	dst := filepath.Join(cachedir, file)
	if err := csdb.DownloadFile(url, dst); err != nil {
		log.Fatal(err)
	}
	return dst
}

func ppaUpload(workdir, ppa, sshUser string, files []string) {
	p := strings.Split(ppa, "/")
	if len(p) != 2 {
@ -725,6 +905,7 @@ func doWindowsInstaller(cmdline []string) {
	var (
		arch    = flag.String("arch", runtime.GOARCH, "Architecture for cross build packaging")
		signer  = flag.String("signer", "", `Environment variable holding the signing key (e.g. WINDOWS_SIGNING_KEY)`)
		signify = flag.String("signify key", "", `Environment variable holding the signify signing key (e.g. WINDOWS_SIGNIFY_KEY)`)
		upload  = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
		workdir = flag.String("workdir", "", `Output directory for packages (uses temp dir if unset)`)
	)
@ -763,9 +944,12 @@ func doWindowsInstaller(cmdline []string) {
	build.Render("build/nsis.uninstall.nsh", filepath.Join(*workdir, "uninstall.nsh"), 0644, allTools)
	build.Render("build/nsis.pathupdate.nsh", filepath.Join(*workdir, "PathUpdate.nsh"), 0644, nil)
	build.Render("build/nsis.envvarupdate.nsh", filepath.Join(*workdir, "EnvVarUpdate.nsh"), 0644, nil)
	if err := cp.CopyFile(filepath.Join(*workdir, "SimpleFC.dll"), "build/nsis.simplefc.dll"); err != nil {
		log.Fatal("Failed to copy SimpleFC.dll: %v", err)
	}
	if err := cp.CopyFile(filepath.Join(*workdir, "COPYING"), "COPYING"); err != nil {
		log.Fatal("Failed to copy copyright note: %v", err)
	}
	// Build the installer. This assumes that all the needed files have been previously
	// built (don't mix building and packaging to keep cross compilation complexity to a
	// minimum).
@ -782,9 +966,8 @@ func doWindowsInstaller(cmdline []string) {
		"/DARCH="+*arch,
		filepath.Join(*workdir, "geth.nsi"),
	)

	// Sign and publish installer.
	if err := archiveUpload(installer, *upload, *signer, *signify); err != nil {
		log.Fatal(err)
	}
}
@ -793,25 +976,37 @@ func doWindowsInstaller(cmdline []string) {

func doAndroidArchive(cmdline []string) {
	var (
		local   = flag.Bool("local", false, `Flag whether we're only doing a local build (skip Maven artifacts)`)
		signer  = flag.String("signer", "", `Environment variable holding the signing key (e.g. ANDROID_SIGNING_KEY)`)
		signify = flag.String("signify", "", `Environment variable holding the signify signing key (e.g. ANDROID_SIGNIFY_KEY)`)
		deploy  = flag.String("deploy", "", `Destination to deploy the archive (usually "https://oss.sonatype.org")`)
		upload  = flag.String("upload", "", `Destination to upload the archive (usually "gethstore/builds")`)
	)
	flag.CommandLine.Parse(cmdline)
	env := build.Env()
	tc := new(build.GoToolchain)

	// Sanity check that the SDK and NDK are installed and set
	if os.Getenv("ANDROID_HOME") == "" {
		log.Fatal("Please ensure ANDROID_HOME points to your Android SDK")
	}

	// Build gomobile.
	install := tc.Install(GOBIN, "golang.org/x/mobile/cmd/gomobile@latest", "golang.org/x/mobile/cmd/gobind@latest")
	install.Env = append(install.Env)
	build.MustRun(install)

	// Ensure all dependencies are available. This is required to make
	// gomobile bind work because it expects go.sum to contain all checksums.
	build.MustRun(tc.Go("mod", "download"))

	// Build the Android archive and Maven resources
	build.MustRun(gomobileTool("bind", "-ldflags", "-s -w", "--target", "android", "--javapkg", "org.ethereum", "-v", "github.com/ethereum/go-ethereum/mobile"))

	if *local {
		// If we're building locally, copy bundle to build dir and skip Maven
		os.Rename("geth.aar", filepath.Join(GOBIN, "geth.aar"))
		os.Rename("geth-sources.jar", filepath.Join(GOBIN, "geth-sources.jar"))
		return
	}
	meta := newMavenMetadata(env)
@ -824,7 +1019,7 @@ func doAndroidArchive(cmdline []string) {
	archive := "geth-" + archiveBasename("android", params.ArchiveVersion(env.Commit)) + ".aar"
	os.Rename("geth.aar", archive)

	if err := archiveUpload(archive, *upload, *signer, *signify); err != nil {
		log.Fatal(err)
	}
	// Sign and upload all the artifacts to Maven Central
@ -855,15 +1050,15 @@ func gomobileTool(subcmd string, args ...string) *exec.Cmd {
	cmd := exec.Command(filepath.Join(GOBIN, "gomobile"), subcmd)
	cmd.Args = append(cmd.Args, args...)
	cmd.Env = []string{
		"PATH=" + GOBIN + string(os.PathListSeparator) + os.Getenv("PATH"),
	}
	for _, e := range os.Environ() {
		if strings.HasPrefix(e, "GOPATH=") || strings.HasPrefix(e, "PATH=") || strings.HasPrefix(e, "GOBIN=") {
			continue
		}
		cmd.Env = append(cmd.Env, e)
	}
	cmd.Env = append(cmd.Env, "GOBIN="+GOBIN)
	return cmd
}
@ -917,45 +1112,52 @@ func newMavenMetadata(env build.Environment) mavenMetadata {

func doXCodeFramework(cmdline []string) {
	var (
		local   = flag.Bool("local", false, `Flag whether we're only doing a local build (skip Maven artifacts)`)
		signer  = flag.String("signer", "", `Environment variable holding the signing key (e.g. IOS_SIGNING_KEY)`)
		signify = flag.String("signify", "", `Environment variable holding the signify signing key (e.g. IOS_SIGNIFY_KEY)`)
		deploy  = flag.String("deploy", "", `Destination to deploy the archive (usually "trunk")`)
		upload  = flag.String("upload", "", `Destination to upload the archives (usually "gethstore/builds")`)
	)
	flag.CommandLine.Parse(cmdline)
	env := build.Env()
	tc := new(build.GoToolchain)

	// Build gomobile.
	build.MustRun(tc.Install(GOBIN, "golang.org/x/mobile/cmd/gomobile@latest", "golang.org/x/mobile/cmd/gobind@latest"))

	// Ensure all dependencies are available. This is required to make
	// gomobile bind work because it expects go.sum to contain all checksums.
	build.MustRun(tc.Go("mod", "download"))

	// Build the iOS XCode framework
	bind := gomobileTool("bind", "-ldflags", "-s -w", "--target", "ios", "-v", "github.com/ethereum/go-ethereum/mobile")

	if *local {
		// If we're building locally, use the build folder and stop afterwards
		bind.Dir = GOBIN
		build.MustRun(bind)
		return
	}

	// Create the archive.
	maybeSkipArchive(env)
	archive := "geth-" + archiveBasename("ios", params.ArchiveVersion(env.Commit))
	if err := os.MkdirAll(archive, 0755); err != nil {
		log.Fatal(err)
	}
	bind.Dir, _ = filepath.Abs(archive)
	build.MustRun(bind)
	build.MustRunCommand("tar", "-zcvf", archive+".tar.gz", archive)

	// Sign and upload the framework to Azure
	if err := archiveUpload(archive+".tar.gz", *upload, *signer, *signify); err != nil {
		log.Fatal(err)
	}
	// Prepare and upload a PodSpec to CocoaPods
	if *deploy != "" {
		meta := newPodMetadata(env, archive)
		build.Render("build/pod.podspec", "Geth.podspec", 0755, meta)
		build.MustRunCommand("pod", *deploy, "push", "Geth.podspec", "--allow-warnings")
	}
}
@ -1004,58 +1206,6 @@ func newPodMetadata(env build.Environment, archive string) podMetadata {
	}
}

// Cross compilation

func doXgo(cmdline []string) {
	var (
		alltools = flag.Bool("alltools", false, `Flag whether we're building all known tools, or only one in particular`)
	)
	flag.CommandLine.Parse(cmdline)
	env := build.Env()

	// Make sure xgo is available for cross compilation
	gogetxgo := goTool("get", "github.com/karalabe/xgo")
	build.MustRun(gogetxgo)

	// If all tools building is requested, build everything the builder wants
	args := append(buildFlags(env), flag.Args()...)

	if *alltools {
		args = append(args, []string{"--dest", GOBIN}...)
		for _, res := range allToolsArchiveFiles {
			if strings.HasPrefix(res, GOBIN) {
				// Binary tool found, cross build it explicitly
				args = append(args, "./"+filepath.Join("cmd", filepath.Base(res)))
				xgo := xgoTool(args)
				build.MustRun(xgo)
				args = args[:len(args)-1]
			}
		}
		return
	}
	// Otherwise execute the explicit cross compilation
	path := args[len(args)-1]
	args = append(args[:len(args)-1], []string{"--dest", GOBIN, path}...)

	xgo := xgoTool(args)
	build.MustRun(xgo)
}

func xgoTool(args []string) *exec.Cmd {
	cmd := exec.Command(filepath.Join(GOBIN, "xgo"), args...)
	cmd.Env = []string{
		"GOPATH=" + build.GOPATH(),
		"GOBIN=" + GOBIN,
	}
	for _, e := range os.Environ() {
		if strings.HasPrefix(e, "GOPATH=") || strings.HasPrefix(e, "GOBIN=") {
			continue
		}
		cmd.Env = append(cmd.Env, e)
	}
	return cmd
}
// Binary distribution cleanups

func doPurge(cmdline []string) {
@ -1079,6 +1229,8 @@ func doPurge(cmdline []string) {
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("Found %d blobs\n", len(blobs))

	// Iterate over the blobs, collect and sort all unstable builds
	for i := 0; i < len(blobs); i++ {
		if !strings.Contains(blobs[i].Name, "unstable") {
@ -1100,6 +1252,7 @@ func doPurge(cmdline []string) {
			break
		}
	}
	fmt.Printf("Deleting %d blobs\n", len(blobs))
	// Delete all marked as such and return
	if err := build.AzureBlobstoreDelete(auth, blobs); err != nil {
		log.Fatal(err)
@ -1,19 +0,0 @@
#!/bin/sh

# Cleaning the Go cache only makes sense if we actually have Go installed... or
# if Go is actually callable. This does not hold true during deb packaging, so
# we need an explicit check to avoid build failures.
if ! command -v go > /dev/null; then
  exit
fi

version_gt() {
  test "$(printf '%s\n' "$@" | sort -V | head -n 1)" != "$1"
}

golang_version=$(go version |cut -d' ' -f3 |sed 's/go//')

# Clean go build cache when go version is greater than or equal to 1.10
if !(version_gt 1.10 $golang_version); then
  go clean -cache
fi
@ -4,13 +4,27 @@
# Uncomment this to turn on verbose mode.
#export DH_VERBOSE=1

# Launchpad rejects Go's access to $HOME, use custom folders
export GOCACHE=/tmp/go-build
export GOPATH=/tmp/gopath
export GOROOT_BOOTSTRAP={{.GoBootPath}}

override_dh_auto_clean:
	# Don't try to be smart Launchpad, we know our build rules better than you

override_dh_auto_build:
	# We can't download a fresh Go within Launchpad, so we're shipping and building
	# one on the fly. However, we can't build it inside the go-ethereum folder as
	# bootstrapping clashes with go modules, so build in a sibling folder.
	(mv .go ../ && cd ../.go/src && ./make.bash)

	# We can't download external go modules within Launchpad, so we're shipping the
	# entire dependency source cache with go-ethereum.
	mkdir -p $(GOPATH)/pkg
	mv .mod $(GOPATH)/pkg/mod

	# A fresh Go was built, all dependency downloads faked, hope build works now
	../.go/bin/go run build/ci.go install -git-commit={{.Env.Commit}} -git-branch={{.Env.Branch}} -git-tag={{.Env.Tag}} -buildnum={{.Env.Buildnum}} -pull-request={{.Env.IsPullRequest}}

override_dh_auto_test:

build/env.sh (30 lines)
@ -1,30 +0,0 @@
#!/bin/sh

set -e

if [ ! -f "build/env.sh" ]; then
    echo "$0 must be run from the root of the repository."
    exit 2
fi

# Create fake Go workspace if it doesn't exist yet.
workspace="$PWD/build/_workspace"
root="$PWD"
ethdir="$workspace/src/github.com/ethereum"
if [ ! -L "$ethdir/go-ethereum" ]; then
    mkdir -p "$ethdir"
    cd "$ethdir"
    ln -s ../../../../../. go-ethereum
    cd "$root"
fi

# Set up the environment to use the workspace.
GOPATH="$workspace"
export GOPATH

# Run the command inside the workspace.
cd "$ethdir/go-ethereum"
PWD="$ethdir/go-ethereum"

# Launch the arguments with the configured environment.
exec "$@"
@ -43,7 +43,7 @@
  !ifndef Un${StrFuncName}_INCLUDED
    ${Un${StrFuncName}}
  !endif
  !define un.${StrFuncName} '${Un${StrFuncName}}'
!macroend

!insertmacro _IncludeStrFunction StrTok
@ -19,9 +19,9 @@ Section "Geth" GETH_IDX

  # Create start menu launcher
  createDirectory "$SMPROGRAMS\${APPNAME}"
  createShortCut "$SMPROGRAMS\${APPNAME}\${APPNAME}.lnk" "$INSTDIR\geth.exe"
  createShortCut "$SMPROGRAMS\${APPNAME}\Attach.lnk" "$INSTDIR\geth.exe" "attach"
  createShortCut "$SMPROGRAMS\${APPNAME}\Uninstall.lnk" "$INSTDIR\uninstall.exe"

  # Firewall - remove rules (if exists)
  SimpleFC::AdvRemoveRule "Geth incoming peers (TCP:30303)"

cmd/abidump/main.go (new file, 74 lines)
@ -0,0 +1,74 @@
// Copyright 2019 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.

package main

import (
	"encoding/hex"
	"flag"
	"fmt"
	"os"
	"strings"

	"github.com/ethereum/go-ethereum/signer/core/apitypes"
	"github.com/ethereum/go-ethereum/signer/fourbyte"
)

func init() {
	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "Usage:", os.Args[0], "<hexdata>")
		flag.PrintDefaults()
		fmt.Fprintln(os.Stderr, `
Parses the given ABI data and tries to interpret it from the fourbyte database.`)
	}
}

func parse(data []byte) {
	db, err := fourbyte.New()
	if err != nil {
		die(err)
	}
	messages := apitypes.ValidationMessages{}
	db.ValidateCallData(nil, data, &messages)
	for _, m := range messages.Messages {
		fmt.Printf("%v: %v\n", m.Typ, m.Message)
	}
}

// Example
// ./abidump a9059cbb000000000000000000000000ea0e2dc7d65a50e77fc7e84bff3fd2a9e781ff5c0000000000000000000000000000000000000000000000015af1d78b58c40000
func main() {
	flag.Parse()

	switch {
	case flag.NArg() == 1:
		hexdata := flag.Arg(0)
		data, err := hex.DecodeString(strings.TrimPrefix(hexdata, "0x"))
		if err != nil {
			die(err)
		}
		parse(data)
	default:
		fmt.Fprintln(os.Stderr, "Error: one argument needed")
		flag.Usage()
		os.Exit(2)
	}
}

func die(args ...interface{}) {
	fmt.Fprintln(os.Stderr, args...)
	os.Exit(1)
}
@ -21,29 +21,20 @@ import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/cmd/utils"
	"github.com/ethereum/go-ethereum/common/compiler"
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/internal/flags"
	"github.com/ethereum/go-ethereum/log"
	"gopkg.in/urfave/cli.v1"
)

var (
	// Git SHA1 commit hash of the release (set via linker flags)
	gitCommit = ""
@ -103,10 +94,14 @@ var (
		Usage: "Destination language for the bindings (go, java, objc)",
		Value: "go",
	}
	aliasFlag = cli.StringFlag{
		Name:  "alias",
		Usage: "Comma separated aliases for function and event renaming, e.g. original1=alias1, original2=alias2",
	}
)

func init() {
	app = flags.NewApp(gitCommit, gitDate, "ethereum checkpoint helper tool")
	app.Flags = []cli.Flag{
		abiFlag,
		binFlag,
@ -120,9 +115,10 @@ func init() {
		pkgFlag,
		outFlag,
		langFlag,
		aliasFlag,
	}
	app.Action = utils.MigrateFlags(abigen)
	cli.CommandHelpTemplate = flags.OriginCommandHelpTemplate
}

func abigen(c *cli.Context) error {
@ -144,11 +140,12 @@ func abigen(c *cli.Context) error {
	}
	// If the entire solidity code was specified, build and bind based on that
	var (
		abis    []string
		bins    []string
		types   []string
		sigs    []map[string]string
		libs    = make(map[string]string)
		aliases = make(map[string]string)
	)
	if c.GlobalString(abiFlag.Name) != "" {
		// Load up the ABI, optional bytecode and type name from the parameters
@ -199,10 +196,22 @@ func abigen(c *cli.Context) error {
			utils.Fatalf("Failed to build Solidity contract: %v", err)
		}
	case c.GlobalIsSet(vyFlag.Name):
		output, err := compiler.CompileVyper(c.GlobalString(vyperFlag.Name), c.GlobalString(vyFlag.Name))
		if err != nil {
			utils.Fatalf("Failed to build Vyper contract: %v", err)
		}
		contracts = make(map[string]*compiler.Contract)
		for n, contract := range output {
			name := n
			// Sanitize the combined json names to match the
			// format expected by solidity.
			if !strings.Contains(n, ":") {
				// Remove extra path components
				name = abi.ToCamelCase(strings.TrimSuffix(filepath.Base(name), ".vy"))
			}
			contracts[name] = contract
		}

	case c.GlobalIsSet(jsonFlag.Name):
		jsonOutput, err := ioutil.ReadFile(c.GlobalString(jsonFlag.Name))
		if err != nil {
@ -232,8 +241,20 @@ func abigen(c *cli.Context) error {
			libs[libPattern] = nameParts[len(nameParts)-1]
		}
	}
	// Extract all aliases from the flags
	if c.GlobalIsSet(aliasFlag.Name) {
		// We support multi-versions for aliasing
		// e.g.
		//	foo=bar,foo2=bar2
		//	foo:bar,foo2:bar2
		re := regexp.MustCompile(`(?:(\w+)[:=](\w+))`)
		submatches := re.FindAllStringSubmatch(c.GlobalString(aliasFlag.Name), -1)
		for _, match := range submatches {
			aliases[match[1]] = match[2]
		}
	}
	// Generate the contract binding
	code, err := bind.Bind(types, abis, bins, sigs, c.GlobalString(pkgFlag.Name), lang, libs, aliases)
	if err != nil {
		utils.Fatalf("Failed to generate ABI binding: %v", err)
	}
@ -28,7 +28,6 @@ import (
	"github.com/ethereum/go-ethereum/crypto"
	"github.com/ethereum/go-ethereum/log"
	"github.com/ethereum/go-ethereum/p2p/discover"
	"github.com/ethereum/go-ethereum/p2p/enode"
	"github.com/ethereum/go-ethereum/p2p/nat"
	"github.com/ethereum/go-ethereum/p2p/netutil"
@ -44,7 +43,7 @@ func main() {
		natdesc     = flag.String("nat", "none", "port mapping mechanism (any|none|upnp|pmp|extip:<IP>)")
		netrestrict = flag.String("netrestrict", "", "restrict network communication to the given IP networks (CIDR masks)")
		runv5       = flag.Bool("v5", false, "run a v5 topic discovery bootnode")
		verbosity   = flag.Int("verbosity", int(log.LvlInfo), "log verbosity (0-5)")
		vmodule     = flag.String("vmodule", "", "log verbosity pattern")

		nodeKey *ecdsa.PrivateKey
@ -121,17 +120,17 @@ func main() {
	printNotice(&nodeKey.PublicKey, *realaddr)

	db, _ := enode.OpenDB("")
	ln := enode.NewLocalNode(db, nodeKey)
	cfg := discover.Config{
		PrivateKey:  nodeKey,
		NetRestrict: restrictList,
	}
	if *runv5 {
		if _, err := discover.ListenV5(conn, ln, cfg); err != nil {
			utils.Fatalf("%v", err)
		}
	} else {
		if _, err := discover.ListenUDP(conn, ln, cfg); err != nil {
			utils.Fatalf("%v", err)
		}

cmd/checkpoint-admin/README.md (new file, 103 lines)
@ -0,0 +1,103 @@
## Checkpoint-admin

Checkpoint-admin is a tool for updating checkpoint oracle status. It provides a series of functions, including deploying the checkpoint oracle contract, signing new checkpoints, and updating checkpoints in the checkpoint oracle contract.

### Checkpoint

In the LES protocol there is an important concept called a checkpoint. In simple terms, whenever a certain number of blocks have been generated on the blockchain, a new checkpoint is generated which contains some important information, such as

* Block hash at checkpoint
* Canonical hash trie root at checkpoint
* Bloom trie root at checkpoint

*For a more detailed introduction to checkpoints, please see the LES [spec](https://github.com/ethereum/devp2p/blob/master/caps/les.md).*

Using this information, light clients can skip all historical block headers when synchronizing data and start synchronization from this checkpoint. Therefore, as long as the light client can obtain some recent and correct checkpoints, the amount of data and time needed for synchronization is greatly reduced.
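As a rough illustration of what a checkpoint bundles, here is a minimal Go sketch. The field names only loosely mirror go-ethereum's `params.TrustedCheckpoint` and are assumptions for illustration, not the authoritative definition.

```go
package main

import "fmt"

// checkpoint bundles the per-section data listed above. The field names are
// illustrative; see params.TrustedCheckpoint in go-ethereum for the real type.
type checkpoint struct {
	SectionIndex uint64   // index of the chain section this checkpoint covers
	SectionHead  [32]byte // block hash at the checkpoint
	CHTRoot      [32]byte // canonical hash trie root at the checkpoint
	BloomRoot    [32]byte // bloom trie root at the checkpoint
}

func main() {
	cp := checkpoint{SectionIndex: 300}
	fmt.Printf("a light client can start header sync at section %d instead of block 0\n", cp.SectionIndex)
}
```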
However, from a security perspective, the most critical step in a checkpoint-based synchronization algorithm is to determine whether the checkpoint used by the light client is correct. Otherwise, all blockchain data synchronized on top of this checkpoint may be wrong. For this we provide two different ways to ensure the correctness of the checkpoint used by the light client.

#### Hardcoded checkpoint

There are several hardcoded checkpoints in the [source code](https://github.com/ethereum/go-ethereum/blob/master/params/config.go#L38) of the go-ethereum project. These checkpoints are updated by go-ethereum developers when new versions of the software are released. Because light client users trust Geth developers to some extent, hardcoded checkpoints in the code can also be considered correct.

#### Checkpoint oracle

Hardcoded checkpoints can solve the problem of verifying the correctness of checkpoints (although this is a more centralized solution). The pain point of this solution is that developers can only update checkpoints when a new version of the software is released. In addition, light client users usually do not keep their Geth version up to date, so the hardcoded checkpoints they rely on are generally stale and a large amount of blockchain data still has to be downloaded during synchronization.

The checkpoint oracle is a more flexible solution. In simple terms, it is a smart contract deployed on the blockchain that records several designated trusted signers. Whenever enough trusted signers have issued their signatures for the same checkpoint, it can be considered that the checkpoint has been authenticated by the signers. Checkpoints authenticated by trusted signers can be considered correct.

So this way, even without updating the software version, as long as the trusted signers regularly update the checkpoint in the oracle, the light client can always use the latest, verified checkpoint for data synchronization.
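The acceptance rule described above amounts to counting distinct trusted signatures for one checkpoint against the contract's threshold. The minimal Go sketch below shows only that counting rule; signature recovery and the oracle contract's actual storage layout are deliberately left out, and the addresses are made up.

```go
package main

import "fmt"

// accepted reports whether a checkpoint vouched for by the given (already
// recovered) signer addresses meets the oracle's threshold. Signature
// recovery itself is omitted in this sketch.
func accepted(trusted map[string]bool, threshold int, signers []string) bool {
	seen := make(map[string]bool)
	for _, s := range signers {
		if trusted[s] {
			seen[s] = true // count each trusted signer only once
		}
	}
	return len(seen) >= threshold
}

func main() {
	trusted := map[string]bool{"0xA1": true, "0xB2": true, "0xC3": true}
	fmt.Println(accepted(trusted, 2, []string{"0xA1", "0xB2", "0xA1"})) // true
	fmt.Println(accepted(trusted, 2, []string{"0xA1", "0xDD"}))         // false
}
```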
### Usage

Checkpoint-admin is a command line tool designed for the checkpoint oracle. Users can easily deploy the contract and update checkpoints with this tool.

#### Install

```shell
go get github.com/ethereum/go-ethereum/cmd/checkpoint-admin
```

#### Deploy

Deploy the checkpoint oracle contract. `--signers` indicates the specified trusted signers, and `--threshold` indicates the minimum number of signatures required from trusted signers to update a checkpoint.

```shell
checkpoint-admin deploy --rpc <NODE_RPC_ENDPOINT> --clef <CLEF_ENDPOINT> --signer <SIGNER_TO_SIGN_TX> --signers <TRUSTED_SIGNER_LIST> --threshold 1
```

It is worth noting that checkpoint-admin only supports clef as the signer for transactions and plain text (checkpoint). For more clef usage, please see the clef [tutorial](https://geth.ethereum.org/docs/clef/tutorial).

#### Sign

Checkpoint-admin provides two different signing modes. You can automatically obtain the current stable checkpoint and sign it interactively, or you can use the information provided via command line flags to sign a checkpoint offline.

**Interactive mode**

```shell
checkpoint-admin sign --clef <CLEF_ENDPOINT> --signer <SIGNER_TO_SIGN_CHECKPOINT> --rpc <NODE_RPC_ENDPOINT>
```

*It is worth noting that the connected Geth node can be a fullnode or a light client. If it is a fullnode, you must enable the LES protocol, e.g. by adding `--light.serv 50` to the startup command line flags.*

**Offline mode**

```shell
checkpoint-admin sign --clef <CLEF_ENDPOINT> --signer <SIGNER_TO_SIGN_CHECKPOINT> --index <CHECKPOINT_INDEX> --hash <CHECKPOINT_HASH> --oracle <CHECKPOINT_ORACLE_ADDRESS>
```

*CHECKPOINT_HASH is obtained based on this [calculation method](https://github.com/ethereum/go-ethereum/blob/master/params/config.go#L251).*
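As a rough sketch of that calculation: the checkpoint hash is assumed here to be the Keccak-256 of the section index (8-byte big-endian) followed by the section head, CHT root and bloom trie root, which matches a reading of `params.TrustedCheckpoint`; the linked source remains authoritative.

```go
package main

import (
	"encoding/binary"
	"fmt"

	"golang.org/x/crypto/sha3"
)

// checkpointHash sketches how a checkpoint hash can be derived from its parts:
// keccak256(sectionIndex (8-byte big endian) ++ sectionHead ++ chtRoot ++ bloomRoot).
func checkpointHash(sectionIndex uint64, sectionHead, chtRoot, bloomRoot [32]byte) [32]byte {
	var index [8]byte
	binary.BigEndian.PutUint64(index[:], sectionIndex)

	h := sha3.NewLegacyKeccak256()
	h.Write(index[:])
	h.Write(sectionHead[:])
	h.Write(chtRoot[:])
	h.Write(bloomRoot[:])

	var out [32]byte
	copy(out[:], h.Sum(nil))
	return out
}

func main() {
	hash := checkpointHash(300, [32]byte{}, [32]byte{}, [32]byte{})
	fmt.Printf("0x%x\n", hash)
}
```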
#### Publish

Collect enough signatures from different trusted signers for the same checkpoint and submit them to the oracle to update the "authenticated" checkpoint in the contract.

```shell
checkpoint-admin publish --clef <CLEF_ENDPOINT> --rpc <NODE_RPC_ENDPOINT> --signer <SIGNER_TO_SIGN_TX> --index <CHECKPOINT_INDEX> --signatures <CHECKPOINT_SIGNATURE_LIST>
```

#### Status query

Check the latest status of the checkpoint oracle.

```shell
checkpoint-admin status --rpc <NODE_RPC_ENDPOINT>
```

### Enable checkpoint oracle in your private network

Currently, only the Ethereum mainnet and the default supported test networks (ropsten, rinkeby, goerli) activate this feature. If you want to activate it in your private network, you can overwrite the relevant checkpoint oracle settings through the configuration file after deploying the oracle contract.

* Get your node configuration file: `geth dumpconfig OTHER_COMMAND_LINE_OPTIONS > config.toml`
* Edit the configuration file and add the following information

```toml
[Eth.CheckpointOracle]
Address = CHECKPOINT_ORACLE_ADDRESS
Signers = [TRUSTED_SIGNER_1, ..., TRUSTED_SIGNER_N]
Threshold = THRESHOLD
```

* Start geth with the modified configuration file

*In the private network, all fullnodes and light clients need to be started with the same checkpoint oracle settings.*
@ -22,25 +22,12 @@ import (
	"fmt"
	"os"

	"github.com/ethereum/go-ethereum/common/fdlimit"
	"github.com/ethereum/go-ethereum/internal/flags"
	"github.com/ethereum/go-ethereum/log"
	"gopkg.in/urfave/cli.v1"
)

var (
	// Git SHA1 commit hash of the release (set via linker flags)
	gitCommit = ""
@ -50,7 +37,7 @@ var (
var app *cli.App

func init() {
	app = flags.NewApp(gitCommit, gitDate, "ethereum checkpoint helper tool")
	app.Commands = []cli.Command{
		commandStatus,
		commandDeploy,
@ -61,7 +48,7 @@ func init() {
		oracleFlag,
		nodeURLFlag,
	}
	cli.CommandHelpTemplate = flags.OriginCommandHelpTemplate
}

// Commonly used command line flags.
@ -9,7 +9,7 @@ Clef can run as a daemon on the same machine, off a usb-stick like [USB armory](
Check out the

* [CLI tutorial](tutorial.md) for some concrete examples on how Clef works.
* [Setup docs](docs/setup.md) for information on how to configure Clef on QubesOS or USB Armory.
* [Data types](datatypes.md) for details on the communication messages between Clef and an external UI.

## Command line flags
@ -33,12 +33,12 @@ GLOBAL OPTIONS:
  --lightkdf             Reduce key-derivation RAM & CPU usage at some expense of KDF strength
  --nousb                Disables monitoring for and managing USB hardware wallets
  --pcscdpath value      Path to the smartcard daemon (pcscd) socket file (default: "/run/pcscd/pcscd.comm")
  --http.addr value      HTTP-RPC server listening interface (default: "localhost")
  --http.vhosts value    Comma separated list of virtual hostnames from which to accept requests (server enforced). Accepts '*' wildcard. (default: "localhost")
  --ipcdisable           Disable the IPC-RPC server
  --ipcpath              Filename for IPC socket/pipe within the datadir (explicit paths escape it)
  --http                 Enable the HTTP-RPC server
  --http.port value      HTTP-RPC server listening port (default: 8550)
  --signersecret value   A file containing the (encrypted) master seed to encrypt Clef data, e.g. keystore credentials and ruleset hash
  --4bytedb-custom value File used for writing new 4byte-identifiers submitted via API (default: "./4byte-custom.json")
  --auditlog value       File used to emit audit logs. Set to "" to disable (default: "audit.log")
@ -46,6 +46,7 @@ GLOBAL OPTIONS:
  --stdio-ui             Use STDIN/STDOUT as a channel for an external UI. This means that an STDIN/STDOUT is used for RPC-communication with a e.g. a graphical user interface, and can be used when Clef is started by an external process.
  --stdio-ui-test        Mechanism to test interface between Clef and UI. Requires 'stdio-ui'.
  --advanced             If enabled, issues warnings instead of rejections for suspicious requests. Default off
  --suppress-bootwarn    If set, does not show the warning during boot
  --help, -h             show help
  --version, -v          print the version
```
@ -112,11 +113,11 @@ Some snags and todos

### External API

Clef listens to HTTP requests on `http.addr`:`http.port` (or to IPC on `ipcpath`), with the same JSON-RPC standard as Geth. The messages are expected to be [JSON-RPC 2.0 standard](https://www.jsonrpc.org/specification).

Some of these calls can require user interaction. Clients must be aware that responses may be delayed significantly or may never be received if a user decides to ignore the confirmation request.

The External API is **untrusted**: it does not accept credentials, nor does it expect that requests have any authority.
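As an illustration only (not part of the upstream docs), a JSON-RPC 2.0 request can be posted to Clef's HTTP endpoint as in the sketch below; it assumes the default `localhost:8550` endpoint documented above and uses the `account_list` method described later in this document.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Assumes Clef is listening on the documented defaults (--http.addr localhost, --http.port 8550).
	body := []byte(`{"id": 1, "jsonrpc": "2.0", "method": "account_list", "params": []}`)
	resp, err := http.Post("http://localhost:8550", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out)) // prints the JSON-RPC response, e.g. the account list
}
```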
|
||||||
|
|
||||||
### Internal UI API
|
### Internal UI API
|
||||||
|
|
||||||
@ -145,13 +146,11 @@ See the [external API changelog](extapi_changelog.md) for information about chan
|
|||||||
|
|
||||||
All hex encoded values must be prefixed with `0x`.
|
All hex encoded values must be prefixed with `0x`.
|
||||||
|
|
||||||
## Methods
|
|
||||||
|
|
||||||
### account_new
|
### account_new
|
||||||
|
|
||||||
#### Create new password protected account
|
#### Create new password protected account
|
||||||
|
|
||||||
The signer will generate a new private key, encrypts it according to [web3 keystore spec](https://github.com/ethereum/wiki/wiki/Web3-Secret-Storage-Definition) and stores it in the keystore directory.
|
The signer will generate a new private key, encrypt it according to [web3 keystore spec](https://github.com/ethereum/wiki/wiki/Web3-Secret-Storage-Definition) and store it in the keystore directory.
|
||||||
The client is responsible for creating a backup of the keystore. If the keystore is lost there is no method of retrieving lost accounts.
|
The client is responsible for creating a backup of the keystore. If the keystore is lost there is no method of retrieving lost accounts.
|
||||||
|
|
||||||
#### Arguments
|
#### Arguments
|
||||||
@ -160,7 +159,6 @@ None
|
|||||||
|
|
||||||
#### Result
|
#### Result
|
||||||
- address [string]: account address that is derived from the generated key
|
- address [string]: account address that is derived from the generated key
|
||||||
- url [string]: location of the keyfile
|
|
||||||
|
|
||||||
#### Sample call
|
#### Sample call
|
||||||
```json
|
```json
|
||||||
@ -172,14 +170,11 @@ None
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
Response
|
Response
|
||||||
```
|
```json
|
||||||
{
|
{
|
||||||
"id": 0,
|
"id": 0,
|
||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"result": {
|
"result": "0xbea9183f8f4f03d427f6bcea17388bdff1cab133"
|
||||||
"address": "0xbea9183f8f4f03d427f6bcea17388bdff1cab133",
|
|
||||||
"url": "keystore:///my/keystore/UTC--2017-08-24T08-40-15.419655028Z--bea9183f8f4f03d427f6bcea17388bdff1cab133"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -195,8 +190,6 @@ None
|
|||||||
#### Result
|
#### Result
|
||||||
- array with account records:
|
- array with account records:
|
||||||
- account.address [string]: account address that is derived from the generated key
|
- account.address [string]: account address that is derived from the generated key
|
||||||
- account.type [string]: type of the
|
|
||||||
- account.url [string]: location of the account
|
|
||||||
|
|
||||||
#### Sample call
|
#### Sample call
|
||||||
```json
|
```json
|
||||||
@ -207,21 +200,13 @@ None
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
Response
|
Response
|
||||||
```
|
```json
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"result": [
|
"result": [
|
||||||
{
|
"0xafb2f771f58513609765698f65d3f2f0224a956f",
|
||||||
"address": "0xafb2f771f58513609765698f65d3f2f0224a956f",
|
"0xbea9183f8f4f03d427f6bcea17388bdff1cab133"
|
||||||
"type": "account",
|
|
||||||
"url": "keystore:///tmp/keystore/UTC--2017-08-24T07-26-47.162109726Z--afb2f771f58513609765698f65d3f2f0224a956f"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "0xbea9183f8f4f03d427f6bcea17388bdff1cab133",
|
|
||||||
"type": "account",
|
|
||||||
"url": "keystore:///tmp/keystore/UTC--2017-08-24T08-40-15.419655028Z--bea9183f8f4f03d427f6bcea17388bdff1cab133"
|
|
||||||
}
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@ -229,10 +214,10 @@ Response
|
|||||||
### account_signTransaction
|
### account_signTransaction
|
||||||
|
|
||||||
#### Sign transactions
|
#### Sign transactions
|
||||||
Signs a transactions and responds with the signed transaction in RLP encoded form.
|
Signs a transaction and responds with the signed transaction in RLP-encoded and JSON forms.
|
||||||
|
|
||||||
#### Arguments
|
#### Arguments
|
||||||
2. transaction object:
|
1. transaction object:
|
||||||
- `from` [address]: account to send the transaction from
|
- `from` [address]: account to send the transaction from
|
||||||
- `to` [address]: receiver account. If omitted or `0x`, will cause contract creation.
|
- `to` [address]: receiver account. If omitted or `0x`, will cause contract creation.
|
||||||
- `gas` [number]: maximum amount of gas to burn
|
- `gas` [number]: maximum amount of gas to burn
|
||||||
@ -240,12 +225,13 @@ Response
|
|||||||
- `value` [number:optional]: amount of Wei to send with the transaction
|
- `value` [number:optional]: amount of Wei to send with the transaction
|
||||||
- `data` [data:optional]: input data
|
- `data` [data:optional]: input data
|
||||||
- `nonce` [number]: account nonce
|
- `nonce` [number]: account nonce
|
||||||
3. method signature [string:optional]
|
1. method signature [string:optional]
|
||||||
- The method signature, if present, is to aid decoding the calldata. Should consist of `methodname(paramtype,...)`, e.g. `transfer(uint256,address)`. The signer may use this data to parse the supplied calldata, and show the user. The data, however, is considered totally untrusted, and reliability is not expected.
|
    - The method signature, if present, is to aid decoding the calldata. Should consist of `methodname(paramtype,...)`, e.g. `transfer(uint256,address)`. The signer may use this data to parse the supplied calldata and show it to the user. The data, however, is considered totally untrusted, and reliability is not expected.
|
||||||
|
|
||||||
|
|
||||||
#### Result
|
#### Result
|
||||||
- signed transaction in RLP encoded form [data]
|
- raw [data]: signed transaction in RLP encoded form
|
||||||
|
- tx [json]: signed transaction in JSON form
|
||||||
|
|
||||||
#### Sample call
|
#### Sample call
|
||||||
```json
|
```json
|
||||||
@ -270,11 +256,22 @@ Response
|
|||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"id": 2,
|
|
||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"error": {
|
"id": 2,
|
||||||
"code": -32000,
|
"result": {
|
||||||
"message": "Request denied"
|
"raw": "0xf88380018203339407a565b7ed7d7a678680a4c162885bedbb695fe080a44401a6e4000000000000000000000000000000000000000000000000000000000000001226a0223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20ea02aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663",
|
||||||
|
"tx": {
|
||||||
|
"nonce": "0x0",
|
||||||
|
"gasPrice": "0x1234",
|
||||||
|
"gas": "0x55555",
|
||||||
|
"to": "0x07a565b7ed7d7a678680a4c162885bedbb695fe0",
|
||||||
|
"value": "0x1234",
|
||||||
|
"input": "0xabcd",
|
||||||
|
"v": "0x26",
|
||||||
|
"r": "0x223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20e",
|
||||||
|
"s": "0x2aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663",
|
||||||
|
"hash": "0xeba2df809e7a612a0a0d444ccfa5c839624bdc00dd29e3340d46df3870f8a30e"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@ -326,7 +323,7 @@ Response
|
|||||||
|
|
||||||
Bash example:
|
Bash example:
|
||||||
```bash
|
```bash
|
||||||
#curl -H "Content-Type: application/json" -X POST --data '{"jsonrpc":"2.0","method":"account_signTransaction","params":[{"from":"0x694267f14675d7e1b9494fd8d72fefe1755710fa","gas":"0x333","gasPrice":"0x1","nonce":"0x0","to":"0x07a565b7ed7d7a678680a4c162885bedbb695fe0", "value":"0x0", "data":"0x4401a6e40000000000000000000000000000000000000000000000000000000000000012"},"safeSend(address)"],"id":67}' http://localhost:8550/
|
> curl -H "Content-Type: application/json" -X POST --data '{"jsonrpc":"2.0","method":"account_signTransaction","params":[{"from":"0x694267f14675d7e1b9494fd8d72fefe1755710fa","gas":"0x333","gasPrice":"0x1","nonce":"0x0","to":"0x07a565b7ed7d7a678680a4c162885bedbb695fe0", "value":"0x0", "data":"0x4401a6e40000000000000000000000000000000000000000000000000000000000000012"},"safeSend(address)"],"id":67}' http://localhost:8550/
|
||||||
|
|
||||||
{"jsonrpc":"2.0","id":67,"result":{"raw":"0xf88380018203339407a565b7ed7d7a678680a4c162885bedbb695fe080a44401a6e4000000000000000000000000000000000000000000000000000000000000001226a0223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20ea02aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663","tx":{"nonce":"0x0","gasPrice":"0x1","gas":"0x333","to":"0x07a565b7ed7d7a678680a4c162885bedbb695fe0","value":"0x0","input":"0x4401a6e40000000000000000000000000000000000000000000000000000000000000012","v":"0x26","r":"0x223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20e","s":"0x2aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663","hash":"0xeba2df809e7a612a0a0d444ccfa5c839624bdc00dd29e3340d46df3870f8a30e"}}}
|
{"jsonrpc":"2.0","id":67,"result":{"raw":"0xf88380018203339407a565b7ed7d7a678680a4c162885bedbb695fe080a44401a6e4000000000000000000000000000000000000000000000000000000000000001226a0223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20ea02aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663","tx":{"nonce":"0x0","gasPrice":"0x1","gas":"0x333","to":"0x07a565b7ed7d7a678680a4c162885bedbb695fe0","value":"0x0","input":"0x4401a6e40000000000000000000000000000000000000000000000000000000000000012","v":"0x26","r":"0x223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20e","s":"0x2aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663","hash":"0xeba2df809e7a612a0a0d444ccfa5c839624bdc00dd29e3340d46df3870f8a30e"}}}
|
||||||
```
|
```
|
||||||
@ -373,7 +370,7 @@ Response
|
|||||||
### account_signTypedData
|
### account_signTypedData
|
||||||
|
|
||||||
#### Sign data
|
#### Sign data
|
||||||
Signs a chunk of structured data conformant to [EIP712]([EIP-712](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md)) and returns the calculated signature.
|
Signs a chunk of structured data conformant to [EIP-712](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md) and returns the calculated signature.
|
||||||
|
|
||||||
#### Arguments
|
#### Arguments
|
||||||
- account [address]: account to sign with
|
- account [address]: account to sign with
|
||||||
@ -469,7 +466,7 @@ Response
|
|||||||
|
|
||||||
### account_ecRecover
|
### account_ecRecover
|
||||||
|
|
||||||
#### Sign data
|
#### Recover the signing address
|
||||||
|
|
||||||
Derive the address from the account that was used to sign data with content type `text/plain` and the signature.
|
Derive the address from the account that was used to sign data with content type `text/plain` and the signature.
|
||||||
|
|
||||||
@ -487,7 +484,6 @@ Derive the address from the account that was used to sign data with content type
|
|||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"method": "account_ecRecover",
|
"method": "account_ecRecover",
|
||||||
"params": [
|
"params": [
|
||||||
"data/plain",
|
|
||||||
"0xaabbccdd",
|
"0xaabbccdd",
|
||||||
"0x5b6693f153b48ec1c706ba4169960386dbaa6903e249cc79a8e6ddc434451d417e1e57327872c7f538beeb323c300afa9999a3d4a5de6caf3be0d5ef832b67ef1c"
|
"0x5b6693f153b48ec1c706ba4169960386dbaa6903e249cc79a8e6ddc434451d417e1e57327872c7f538beeb323c300afa9999a3d4a5de6caf3be0d5ef832b67ef1c"
|
||||||
]
|
]
|
||||||
@ -503,117 +499,36 @@ Response
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### account_import
|
### account_version
|
||||||
|
|
||||||
#### Import account
|
#### Get external API version
|
||||||
Import a private key into the keystore. The imported key is expected to be encrypted according to the web3 keystore
|
|
||||||
format.
|
Get the version of the external API used by Clef.
|
||||||
|
|
||||||
#### Arguments
|
#### Arguments
|
||||||
- account [object]: key in [web3 keystore format](https://github.com/ethereum/wiki/wiki/Web3-Secret-Storage-Definition) (retrieved with account_export)
|
|
||||||
|
None
|
||||||
|
|
||||||
#### Result
|
#### Result
|
||||||
- imported key [object]:
|
|
||||||
- key.address [address]: address of the imported key
|
- external API version [string]
|
||||||
- key.type [string]: type of the account
|
|
||||||
- key.url [string]: key URL
|
|
||||||
|
|
||||||
#### Sample call
|
#### Sample call
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"id": 6,
|
"id": 0,
|
||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"method": "account_import",
|
"method": "account_version",
|
||||||
"params": [
|
"params": []
|
||||||
{
|
|
||||||
"address": "c7412fc59930fd90099c917a50e5f11d0934b2f5",
|
|
||||||
"crypto": {
|
|
||||||
"cipher": "aes-128-ctr",
|
|
||||||
"cipherparams": {
|
|
||||||
"iv": "401c39a7c7af0388491c3d3ecb39f532"
|
|
||||||
},
|
|
||||||
"ciphertext": "eb045260b18dd35cd0e6d99ead52f8fa1e63a6b0af2d52a8de198e59ad783204",
|
|
||||||
"kdf": "scrypt",
|
|
||||||
"kdfparams": {
|
|
||||||
"dklen": 32,
|
|
||||||
"n": 262144,
|
|
||||||
"p": 1,
|
|
||||||
"r": 8,
|
|
||||||
"salt": "9a657e3618527c9b5580ded60c12092e5038922667b7b76b906496f021bb841a"
|
|
||||||
},
|
|
||||||
"mac": "880dc10bc06e9cec78eb9830aeb1e7a4a26b4c2c19615c94acb632992b952806"
|
|
||||||
},
|
|
||||||
"id": "09bccb61-b8d3-4e93-bf4f-205a8194f0b9",
|
|
||||||
"version": 3
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Response
|
Response
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"id": 6,
|
"id": 0,
|
||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"result": {
|
"result": "6.0.0"
|
||||||
"address": "0xc7412fc59930fd90099c917a50e5f11d0934b2f5",
|
|
||||||
"type": "account",
|
|
||||||
"url": "keystore:///tmp/keystore/UTC--2017-08-24T11-00-42.032024108Z--c7412fc59930fd90099c917a50e5f11d0934b2f5"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### account_export
|
|
||||||
|
|
||||||
#### Export account from keystore
|
|
||||||
Export a private key from the keystore. The exported private key is encrypted with the original password. When the
|
|
||||||
key is imported later this password is required.
|
|
||||||
|
|
||||||
#### Arguments
|
|
||||||
- account [address]: export private key that is associated with this account
|
|
||||||
|
|
||||||
#### Result
|
|
||||||
- exported key, see [web3 keystore format](https://github.com/ethereum/wiki/wiki/Web3-Secret-Storage-Definition) for
|
|
||||||
more information
|
|
||||||
|
|
||||||
#### Sample call
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"jsonrpc": "2.0",
|
|
||||||
"method": "account_export",
|
|
||||||
"params": [
|
|
||||||
"0xc7412fc59930fd90099c917a50e5f11d0934b2f5"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
Response
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"jsonrpc": "2.0",
|
|
||||||
"result": {
|
|
||||||
"address": "c7412fc59930fd90099c917a50e5f11d0934b2f5",
|
|
||||||
"crypto": {
|
|
||||||
"cipher": "aes-128-ctr",
|
|
||||||
"cipherparams": {
|
|
||||||
"iv": "401c39a7c7af0388491c3d3ecb39f532"
|
|
||||||
},
|
|
||||||
"ciphertext": "eb045260b18dd35cd0e6d99ead52f8fa1e63a6b0af2d52a8de198e59ad783204",
|
|
||||||
"kdf": "scrypt",
|
|
||||||
"kdfparams": {
|
|
||||||
"dklen": 32,
|
|
||||||
"n": 262144,
|
|
||||||
"p": 1,
|
|
||||||
"r": 8,
|
|
||||||
"salt": "9a657e3618527c9b5580ded60c12092e5038922667b7b76b906496f021bb841a"
|
|
||||||
},
|
|
||||||
"mac": "880dc10bc06e9cec78eb9830aeb1e7a4a26b4c2c19615c94acb632992b952806"
|
|
||||||
},
|
|
||||||
"id": "09bccb61-b8d3-4e93-bf4f-205a8194f0b9",
|
|
||||||
"version": 3
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -625,7 +540,7 @@ By starting the signer with the switch `--stdio-ui-test`, the signer will invoke
|
|||||||
denials. This can be used during development to ensure that the API is (at least somewhat) correctly implemented.
|
denials. This can be used during development to ensure that the API is (at least somewhat) correctly implemented.
|
||||||
See `pythonsigner`, which can be invoked via `python3 pythonsigner.py test` to perform the 'denial-handshake-test'.
|
See `pythonsigner`, which can be invoked via `python3 pythonsigner.py test` to perform the 'denial-handshake-test'.
|
||||||
|
|
||||||
All methods in this API uses object-based parameters, so that there can be no mixups of parameters: each piece of data is accessed by key.
|
All methods in this API use object-based parameters, so that there can be no mixup of parameters: each piece of data is accessed by key.
|
||||||
|
|
||||||
See the [ui API changelog](intapi_changelog.md) for information about changes to this API.
|
See the [ui API changelog](intapi_changelog.md) for information about changes to this API.
|
||||||
|
|
||||||
@ -784,12 +699,10 @@ Invoked when a request for account listing has been made.
|
|||||||
{
|
{
|
||||||
"accounts": [
|
"accounts": [
|
||||||
{
|
{
|
||||||
"type": "Account",
|
|
||||||
"url": "keystore:///home/bazonk/.ethereum/keystore/UTC--2017-11-20T14-44-54.089682944Z--123409812340981234098123409812deadbeef42",
|
"url": "keystore:///home/bazonk/.ethereum/keystore/UTC--2017-11-20T14-44-54.089682944Z--123409812340981234098123409812deadbeef42",
|
||||||
"address": "0x123409812340981234098123409812deadbeef42"
|
"address": "0x123409812340981234098123409812deadbeef42"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "Account",
|
|
||||||
"url": "keystore:///home/bazonk/.ethereum/keystore/UTC--2017-11-23T21-59-03.199240693Z--cafebabedeadbeef34098123409812deadbeef42",
|
"url": "keystore:///home/bazonk/.ethereum/keystore/UTC--2017-11-23T21-59-03.199240693Z--cafebabedeadbeef34098123409812deadbeef42",
|
||||||
"address": "0xcafebabedeadbeef34098123409812deadbeef42"
|
"address": "0xcafebabedeadbeef34098123409812deadbeef42"
|
||||||
}
|
}
|
||||||
@ -819,7 +732,13 @@ Invoked when a request for account listing has been made.
|
|||||||
{
|
{
|
||||||
"address": "0x123409812340981234098123409812deadbeef42",
|
"address": "0x123409812340981234098123409812deadbeef42",
|
||||||
"raw_data": "0x01020304",
|
"raw_data": "0x01020304",
|
||||||
"message": "\u0019Ethereum Signed Message:\n4\u0001\u0002\u0003\u0004",
|
"messages": [
|
||||||
|
{
|
||||||
|
"name": "message",
|
||||||
|
"value": "\u0019Ethereum Signed Message:\n4\u0001\u0002\u0003\u0004",
|
||||||
|
"type": "text/plain"
|
||||||
|
}
|
||||||
|
],
|
||||||
"hash": "0x7e3a4e7a9d1744bc5c675c25e1234ca8ed9162bd17f78b9085e48047c15ac310",
|
"hash": "0x7e3a4e7a9d1744bc5c675c25e1234ca8ed9162bd17f78b9085e48047c15ac310",
|
||||||
"meta": {
|
"meta": {
|
||||||
"remote": "signer binary",
|
"remote": "signer binary",
|
||||||
@ -829,12 +748,34 @@ Invoked when a request for account listing has been made.
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### ApproveNewAccount / `ui_approveNewAccount`
|
||||||
|
|
||||||
|
Invoked when a request for creating a new account has been made.
|
||||||
|
|
||||||
|
#### Sample call
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": 4,
|
||||||
|
"method": "ui_approveNewAccount",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"meta": {
|
||||||
|
"remote": "signer binary",
|
||||||
|
"local": "main",
|
||||||
|
"scheme": "in-proc"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### ShowInfo / `ui_showInfo`
|
### ShowInfo / `ui_showInfo`
|
||||||
|
|
||||||
The UI should show the info to the user. Does not expect response.
|
The UI should show the info (a single message) to the user. Does not expect a response.
|
||||||
|
|
||||||
#### Sample call
|
#### Sample call
|
||||||
|
|
||||||
@ -844,9 +785,7 @@ The UI should show the info to the user. Does not expect response.
|
|||||||
"id": 9,
|
"id": 9,
|
||||||
"method": "ui_showInfo",
|
"method": "ui_showInfo",
|
||||||
"params": [
|
"params": [
|
||||||
{
|
"Tests completed"
|
||||||
"text": "Tests completed"
|
|
||||||
}
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -854,18 +793,16 @@ The UI should show the info to the user. Does not expect response.
|
|||||||
|
|
||||||
### ShowError / `ui_showError`
|
### ShowError / `ui_showError`
|
||||||
|
|
||||||
The UI should show the info to the user. Does not expect response.
|
The UI should show the error (a single message) to the user. Does not expect a response.
|
||||||
|
|
||||||
```json
|
```json
|
||||||
|
|
||||||
{
|
{
|
||||||
"jsonrpc": "2.0",
|
"jsonrpc": "2.0",
|
||||||
"id": 2,
|
"id": 2,
|
||||||
"method": "ShowError",
|
"method": "ui_showError",
|
||||||
"params": [
|
"params": [
|
||||||
{
|
"Something bad happened!"
|
||||||
"text": "Testing 'ShowError'"
|
|
||||||
}
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -879,9 +816,36 @@ When implementing rate-limited rules, this callback should be used.
|
|||||||
|
|
||||||
TLDR; Use this method to keep track of signed transactions, instead of using the data in `ApproveTx`.
|
TLDR; Use this method to keep track of signed transactions, instead of using the data in `ApproveTx`.
|
||||||
|
|
||||||
|
Example call:
|
||||||
|
```json
|
||||||
|
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": 1,
|
||||||
|
"method": "ui_onApprovedTx",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"raw": "0xf88380018203339407a565b7ed7d7a678680a4c162885bedbb695fe080a44401a6e4000000000000000000000000000000000000000000000000000000000000001226a0223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20ea02aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663",
|
||||||
|
"tx": {
|
||||||
|
"nonce": "0x0",
|
||||||
|
"gasPrice": "0x1",
|
||||||
|
"gas": "0x333",
|
||||||
|
"to": "0x07a565b7ed7d7a678680a4c162885bedbb695fe0",
|
||||||
|
"value": "0x0",
|
||||||
|
"input": "0x4401a6e40000000000000000000000000000000000000000000000000000000000000012",
|
||||||
|
"v": "0x26",
|
||||||
|
"r": "0x223a7c9bcf5531c99be5ea7082183816eb20cfe0bbc322e97cc5c7f71ab8b20e",
|
||||||
|
"s": "0x2aadee6b34b45bb15bc42d9c09de4a6754e7000908da72d48cc7704971491663",
|
||||||
|
"hash": "0xeba2df809e7a612a0a0d444ccfa5c839624bdc00dd29e3340d46df3870f8a30e"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
### OnSignerStartup / `ui_onSignerStartup`
|
### OnSignerStartup / `ui_onSignerStartup`
|
||||||
|
|
||||||
This method provide the UI with information about what API version the signer uses (both internal and external) aswell as build-info and external API,
|
This method provides the UI with information about what API version the signer uses (both internal and external) as well as build-info and external API,
|
||||||
in k/v-form.
|
in k/v-form.
|
||||||
|
|
||||||
Example call:
|
Example call:
|
||||||
@ -905,6 +869,27 @@ Example call:
|
|||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### OnInputRequired / `ui_onInputRequired`
|
||||||
|
|
||||||
|
Invoked when Clef requires user input (e.g. a password).
|
||||||
|
|
||||||
|
Example call:
|
||||||
|
```json
|
||||||
|
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"id": 1,
|
||||||
|
"method": "ui_onInputRequired",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"title": "Account password",
|
||||||
|
"prompt": "Please enter the password for account 0x694267f14675d7e1b9494fd8d72fefe1755710fa",
|
||||||
|
"isPassword": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### Rules for UI apis
|
### Rules for UI apis
|
||||||
|
|
||||||
@ -934,4 +919,4 @@ There are a couple of implementation for a UI. We'll try to keep this list up to
|
|||||||
| QtSigner| https://github.com/holiman/qtsigner/| Python3/QT-based| :+1:| :+1:| :+1:| :+1:| :+1:| :x: | :+1: (partially)|
|
| QtSigner| https://github.com/holiman/qtsigner/| Python3/QT-based| :+1:| :+1:| :+1:| :+1:| :+1:| :x: | :+1: (partially)|
|
||||||
| GtkSigner| https://github.com/holiman/gtksigner| Python3/GTK-based| :+1:| :x:| :x:| :+1:| :+1:| :x: | :x: |
|
| GtkSigner| https://github.com/holiman/gtksigner| Python3/GTK-based| :+1:| :x:| :x:| :+1:| :+1:| :x: | :x: |
|
||||||
| Frame | https://github.com/floating/frame/commits/go-signer| Electron-based| :x:| :x:| :x:| :x:| ?| :x: | :x: |
|
| Frame | https://github.com/floating/frame/commits/go-signer| Electron-based| :x:| :x:| :x:| :x:| ?| :x: | :x: |
|
||||||
| Clef UI| https://github.com/kyokan/clef-ui| Golang/QT-based| :+1:| :+1:| :x:| :+1:| :+1:| :x: | :+1: (approve tx only)|
|
| Clef UI| https://github.com/ethereum/clef-ui| Golang/QT-based| :+1:| :+1:| :x:| :+1:| :+1:| :x: | :+1: (approve tx only)|
|
||||||
|
@ -3,7 +3,7 @@
|
|||||||
These data types are defined in the channel between clef and the UI
|
These data types are defined in the channel between clef and the UI
|
||||||
### SignDataRequest
|
### SignDataRequest
|
||||||
|
|
||||||
SignDataRequest contains information about a pending request to sign some data. The data to be signed can be of various types, defined by content-type. Clef has done most of the work in canonicalizing and making sense of the data, and it's up to the UI to presentthe user with the contents of the `message`
|
SignDataRequest contains information about a pending request to sign some data. The data to be signed can be of various types, defined by content-type. Clef has done most of the work in canonicalizing and making sense of the data, and it's up to the UI to present the user with the contents of the `message`
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
```json
|
```json
|
||||||
|
@ -34,7 +34,7 @@ There are two ways that this can be achieved: integrated via Qubes or integrated
|
|||||||
|
|
||||||
#### 1. Qubes Integrated
|
#### 1. Qubes Integrated
|
||||||
|
|
||||||
Qubes provdes a facility for inter-qubes communication via `qrexec`. A qube can request to make a cross-qube RPC request
|
Qubes provides a facility for inter-qubes communication via `qrexec`. A qube can request to make a cross-qube RPC request
|
||||||
to another qube. The OS then asks the user if the call is permitted.
|
to another qube. The OS then asks the user if the call is permitted.
|
||||||
|
|
||||||

|

|
||||||
@ -48,7 +48,7 @@ This is how [Split GPG](https://www.qubes-os.org/doc/split-gpg/) is implemented.
|
|||||||
|
|
||||||

|

|
||||||
|
|
||||||
On the `target` qubes, we need to define the rpc service.
|
On the `target` qubes, we need to define the RPC service.
|
||||||
|
|
||||||
[qubes.Clefsign](qubes/qubes.Clefsign):
|
[qubes.Clefsign](qubes/qubes.Clefsign):
|
||||||
|
|
||||||
@ -94,7 +94,7 @@ with minimal requirements.
|
|||||||
On the `client` qube, we need to create a listener which will receive the request from the Dapp, and proxy it.
|
On the `client` qube, we need to create a listener which will receive the request from the Dapp, and proxy it.
|
||||||
|
|
||||||
|
|
||||||
[qubes-client.py](qubes/client/qubes-client.py):
|
[qubes-client.py](qubes/qubes-client.py):
|
||||||
|
|
||||||
```python
|
```python
|
||||||
|
|
||||||
@ -135,11 +135,11 @@ $ cat newaccnt.json
|
|||||||
$ cat newaccnt.json| qrexec-client-vm debian-work qubes.Clefsign
|
$ cat newaccnt.json| qrexec-client-vm debian-work qubes.Clefsign
|
||||||
```
|
```
|
||||||
|
|
||||||
This should pop up first a dialog to allow the IPC call:
|
A dialog should pop up first to allow the IPC call:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
Followed by a GTK-dialog to approve the operation
|
Followed by a GTK-dialog to approve the operation:
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@ -169,7 +169,7 @@ However, it comes with a couple of drawbacks:
|
|||||||
- The `Origin` header must be forwarded
|
- The `Origin` header must be forwarded
|
||||||
- Information about the remote ip must be added as a `X-Forwarded-For`. However, Clef cannot always trust an `XFF` header,
|
- Information about the remote IP must be added as an `X-Forwarded-For` header. However, Clef cannot always trust an `XFF` header,
|
||||||
since malicious clients may lie about `XFF` in order to fool the http server into believing it comes from another address.
|
since malicious clients may lie about `XFF` in order to fool the HTTP server into believing the request comes from another address.
|
||||||
- Even with a policy in place to allow rpc-calls between `caller` and `target`, there will be several popups:
|
- Even with a policy in place to allow RPC calls between `caller` and `target`, there will be several popups:
|
||||||
- One qubes-specific where the user specifies the `target` vm
|
- One qubes-specific where the user specifies the `target` vm
|
||||||
- One clef-specific to approve the transaction
|
- One clef-specific to approve the transaction
|
||||||
|
|
||||||
@ -177,7 +177,7 @@ However, it comes with a couple of drawbacks:
|
|||||||
#### 2. Network integrated
|
#### 2. Network integrated
|
||||||
|
|
||||||
The second way to set up Clef on a qubes system is to allow networking, and have Clef listen to a port which is accessible
|
The second way to set up Clef on a qubes system is to allow networking, and have Clef listen to a port which is accessible
|
||||||
form other qubes.
|
from other qubes.
|
||||||
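A minimal sketch of such a setup, using the flag names documented above (`http.addr`, `http.port`, `keystore`); the listen address below is illustrative and must be an address reachable from the other qubes, which depends on the local Qubes network configuration:

```bash
# Start Clef with the HTTP transport enabled, bound to a qube-reachable address.
# The address 10.137.0.14 and the keystore path are placeholders.
clef --http --http.addr 10.137.0.14 --http.port 8550 \
     --keystore ~/.ethereum/keystore
```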
|
|
||||||

|

|
||||||
|
|
||||||
@ -186,13 +186,13 @@ form other qubes.
|
|||||||
|
|
||||||
## USBArmory
|
## USBArmory
|
||||||
|
|
||||||
The [USB armory](https://inversepath.com/usbarmory) is an open source hardware design with an 800 Mhz ARM processor. It is a pocket-size
|
The [USB armory](https://inversepath.com/usbarmory) is an open source hardware design with an 800 MHz ARM processor. It is a pocket-size
|
||||||
computer. When inserted into a laptop, it identifies itself as a USB network interface, basically adding another network
|
computer. When inserted into a laptop, it identifies itself as a USB network interface, basically adding another network
|
||||||
to your computer. Over this new network interface, you can SSH into the device.
|
to your computer. Over this new network interface, you can SSH into the device.
|
||||||
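As a rough sketch (the user name and address below are assumptions about a typical USB armory image, not something specified here), logging in might look like:

```bash
# SSH into the armory over the USB network interface; the default user and
# address depend on the image installed on the device.
ssh usbarmory@10.0.0.1
```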
|
|
||||||
Running Clef off a USB armory means that you can use the armory as a very versatile offline computer, which only
|
Running Clef off a USB armory means that you can use the armory as a very versatile offline computer, which only
|
||||||
ever connects to a local network between your computer and the device itself.
|
ever connects to a local network between your computer and the device itself.
|
||||||
|
|
||||||
Needless to say, the while this model should be fairly secure against remote attacks, an attacker with physical access
|
Needless to say, while this model should be fairly secure against remote attacks, an attacker with physical access
|
||||||
to the USB Armory would trivially be able to extract the contents of the device filesystem.
|
to the USB Armory would trivially be able to extract the contents of the device filesystem.
|
||||||
|
|
||||||
|
@ -10,6 +10,64 @@ TL;DR: Given a version number MAJOR.MINOR.PATCH, increment the:
|
|||||||
|
|
||||||
Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format.
|
Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format.
|
||||||
|
|
||||||
|
### 6.1.0
|
||||||
|
|
||||||
|
The API-method `account_signGnosisSafeTx` was added. This method takes two parameters,
|
||||||
|
`[address, safeTx]`. The latter, `safeTx`, can be copy-pasted from the gnosis relay. For example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"jsonrpc": "2.0",
|
||||||
|
"method": "account_signGnosisSafeTx",
|
||||||
|
"params": ["0xfd1c4226bfD1c436672092F4eCbfC270145b7256",
|
||||||
|
{
|
||||||
|
"safe": "0x25a6c4BBd32B2424A9c99aEB0584Ad12045382B3",
|
||||||
|
"to": "0xB372a646f7F05Cc1785018dBDA7EBc734a2A20E2",
|
||||||
|
"value": "20000000000000000",
|
||||||
|
"data": null,
|
||||||
|
"operation": 0,
|
||||||
|
"gasToken": "0x0000000000000000000000000000000000000000",
|
||||||
|
"safeTxGas": 27845,
|
||||||
|
"baseGas": 0,
|
||||||
|
"gasPrice": "0",
|
||||||
|
"refundReceiver": "0x0000000000000000000000000000000000000000",
|
||||||
|
"nonce": 2,
|
||||||
|
"executionDate": null,
|
||||||
|
"submissionDate": "2020-09-15T21:54:49.617634Z",
|
||||||
|
"modified": "2020-09-15T21:54:49.617634Z",
|
||||||
|
"blockNumber": null,
|
||||||
|
"transactionHash": null,
|
||||||
|
"safeTxHash": "0x2edfbd5bc113ff18c0631595db32eb17182872d88d9bf8ee4d8c2dd5db6d95e2",
|
||||||
|
"executor": null,
|
||||||
|
"isExecuted": false,
|
||||||
|
"isSuccessful": null,
|
||||||
|
"ethGasPrice": null,
|
||||||
|
"gasUsed": null,
|
||||||
|
"fee": null,
|
||||||
|
"origin": null,
|
||||||
|
"dataDecoded": null,
|
||||||
|
"confirmationsRequired": null,
|
||||||
|
"confirmations": [
|
||||||
|
{
|
||||||
|
"owner": "0xAd2e180019FCa9e55CADe76E4487F126Fd08DA34",
|
||||||
|
"submissionDate": "2020-09-15T21:54:49.663299Z",
|
||||||
|
"transactionHash": null,
|
||||||
|
"confirmationType": "CONFIRMATION",
|
||||||
|
"signature": "0x95a7250bb645f831c86defc847350e7faff815b2fb586282568e96cc859e39315876db20a2eed5f7a0412906ec5ab57652a6f645ad4833f345bda059b9da2b821c",
|
||||||
|
"signatureType": "EOA"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"signatures": null
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"id": 67
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Not all fields are required, though. This method is really just a UX helper, which massages the
|
||||||
|
input to conform to the `EIP-712` [specification](https://docs.gnosis.io/safe/docs/contracts_tx_execution/#transaction-hash)
|
||||||
|
for the Gnosis Safe, and makes the output directly importable by a relay service.
|
||||||
|
|
||||||
|
|
||||||
### 6.0.0
|
### 6.0.0
|
||||||
|
|
||||||
|
@ -10,6 +10,17 @@ TL;DR: Given a version number MAJOR.MINOR.PATCH, increment the:
|
|||||||
|
|
||||||
Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format.
|
Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format.
|
||||||
|
|
||||||
|
### 7.0.1
|
||||||
|
|
||||||
|
Added `clef_New` to the internal API callable from a UI.
|
||||||
|
|
||||||
|
> `New` creates a new password protected Account. The private key is protected with
|
||||||
|
> the given password. Users are responsible to backup the private key that is stored
|
||||||
|
> in the keystore location that was specified when this API was created.
|
||||||
|
> This method is the same as New on the external API, the difference being that
|
||||||
|
> this implementation does not ask for confirmation, since it's initiated by
|
||||||
|
> the user
|
||||||
|
|
||||||
### 7.0.0
|
### 7.0.0
|
||||||
|
|
||||||
- The `message` field was renamed to `messages` in all data signing request methods to better reflect that it's a list, not a value.
|
- The `message` field was renamed to `messages` in all data signing request methods to better reflect that it's a list, not a value.
|
||||||
@ -150,7 +161,7 @@ UserInputResponse struct {
|
|||||||
#### 1.2.0
|
#### 1.2.0
|
||||||
|
|
||||||
* Add `OnStartup` method, to provide the UI with information about what API version
|
* Add `OnStartup` method, to provide the UI with information about what API version
|
||||||
the signer uses (both internal and external) aswell as build-info and external api.
|
the signer uses (both internal and external) as well as build-info and external api.
|
||||||
|
|
||||||
Example call:
|
Example call:
|
||||||
```json
|
```json
|
||||||
|
296
cmd/clef/main.go
@ -29,9 +29,9 @@ import (
|
|||||||
"math/big"
|
"math/big"
|
||||||
"os"
|
"os"
|
||||||
"os/signal"
|
"os/signal"
|
||||||
"os/user"
|
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"runtime"
|
"runtime"
|
||||||
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@ -40,20 +40,21 @@ import (
|
|||||||
"github.com/ethereum/go-ethereum/cmd/utils"
|
"github.com/ethereum/go-ethereum/cmd/utils"
|
||||||
"github.com/ethereum/go-ethereum/common"
|
"github.com/ethereum/go-ethereum/common"
|
||||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||||
"github.com/ethereum/go-ethereum/console"
|
|
||||||
"github.com/ethereum/go-ethereum/core/types"
|
"github.com/ethereum/go-ethereum/core/types"
|
||||||
"github.com/ethereum/go-ethereum/crypto"
|
"github.com/ethereum/go-ethereum/crypto"
|
||||||
"github.com/ethereum/go-ethereum/internal/ethapi"
|
"github.com/ethereum/go-ethereum/internal/ethapi"
|
||||||
|
"github.com/ethereum/go-ethereum/internal/flags"
|
||||||
"github.com/ethereum/go-ethereum/log"
|
"github.com/ethereum/go-ethereum/log"
|
||||||
"github.com/ethereum/go-ethereum/node"
|
"github.com/ethereum/go-ethereum/node"
|
||||||
"github.com/ethereum/go-ethereum/params"
|
"github.com/ethereum/go-ethereum/params"
|
||||||
"github.com/ethereum/go-ethereum/rlp"
|
"github.com/ethereum/go-ethereum/rlp"
|
||||||
"github.com/ethereum/go-ethereum/rpc"
|
"github.com/ethereum/go-ethereum/rpc"
|
||||||
"github.com/ethereum/go-ethereum/signer/core"
|
"github.com/ethereum/go-ethereum/signer/core"
|
||||||
|
"github.com/ethereum/go-ethereum/signer/core/apitypes"
|
||||||
"github.com/ethereum/go-ethereum/signer/fourbyte"
|
"github.com/ethereum/go-ethereum/signer/fourbyte"
|
||||||
"github.com/ethereum/go-ethereum/signer/rules"
|
"github.com/ethereum/go-ethereum/signer/rules"
|
||||||
"github.com/ethereum/go-ethereum/signer/storage"
|
"github.com/ethereum/go-ethereum/signer/storage"
|
||||||
colorable "github.com/mattn/go-colorable"
|
"github.com/mattn/go-colorable"
|
||||||
"github.com/mattn/go-isatty"
|
"github.com/mattn/go-isatty"
|
||||||
"gopkg.in/urfave/cli.v1"
|
"gopkg.in/urfave/cli.v1"
|
||||||
)
|
)
|
||||||
@ -82,6 +83,10 @@ var (
|
|||||||
Name: "advanced",
|
Name: "advanced",
|
||||||
Usage: "If enabled, issues warnings instead of rejections for suspicious requests. Default off",
|
Usage: "If enabled, issues warnings instead of rejections for suspicious requests. Default off",
|
||||||
}
|
}
|
||||||
|
acceptFlag = cli.BoolFlag{
|
||||||
|
Name: "suppress-bootwarn",
|
||||||
|
Usage: "If set, does not show the warning during boot",
|
||||||
|
}
|
||||||
keystoreFlag = cli.StringFlag{
|
keystoreFlag = cli.StringFlag{
|
||||||
Name: "keystore",
|
Name: "keystore",
|
||||||
Value: filepath.Join(node.DefaultDataDir(), "keystore"),
|
Value: filepath.Join(node.DefaultDataDir(), "keystore"),
|
||||||
@ -98,7 +103,7 @@ var (
|
|||||||
Usage: "Chain id to use for signing (1=mainnet, 3=Ropsten, 4=Rinkeby, 5=Goerli)",
|
Usage: "Chain id to use for signing (1=mainnet, 3=Ropsten, 4=Rinkeby, 5=Goerli)",
|
||||||
}
|
}
|
||||||
rpcPortFlag = cli.IntFlag{
|
rpcPortFlag = cli.IntFlag{
|
||||||
Name: "rpcport",
|
Name: "http.port",
|
||||||
Usage: "HTTP-RPC server listening port",
|
Usage: "HTTP-RPC server listening port",
|
||||||
Value: node.DefaultHTTPPort + 5,
|
Value: node.DefaultHTTPPort + 5,
|
||||||
}
|
}
|
||||||
@ -187,6 +192,22 @@ The setpw command stores a password for a given address (keyfile).
|
|||||||
Description: `
|
Description: `
|
||||||
The delpw command removes a password for a given address (keyfile).
|
The delpw command removes a password for a given address (keyfile).
|
||||||
`}
|
`}
|
||||||
|
newAccountCommand = cli.Command{
|
||||||
|
Action: utils.MigrateFlags(newAccount),
|
||||||
|
Name: "newaccount",
|
||||||
|
Usage: "Create a new account",
|
||||||
|
ArgsUsage: "",
|
||||||
|
Flags: []cli.Flag{
|
||||||
|
logLevelFlag,
|
||||||
|
keystoreFlag,
|
||||||
|
utils.LightKDFFlag,
|
||||||
|
acceptFlag,
|
||||||
|
},
|
||||||
|
Description: `
|
||||||
|
The newaccount command creates a new keystore-backed account. It is a convenience-method
|
||||||
|
which can be used in lieu of an external UI.`,
|
||||||
|
}
|
||||||
|
|
||||||
gendocCommand = cli.Command{
|
gendocCommand = cli.Command{
|
||||||
Action: GenDoc,
|
Action: GenDoc,
|
||||||
Name: "gendoc",
|
Name: "gendoc",
|
||||||
@ -196,6 +217,36 @@ The gendoc generates example structures of the json-rpc communication types.
|
|||||||
`}
|
`}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// AppHelpFlagGroups is the application flags, grouped by functionality.
|
||||||
|
var AppHelpFlagGroups = []flags.FlagGroup{
|
||||||
|
{
|
||||||
|
Name: "FLAGS",
|
||||||
|
Flags: []cli.Flag{
|
||||||
|
logLevelFlag,
|
||||||
|
keystoreFlag,
|
||||||
|
configdirFlag,
|
||||||
|
chainIdFlag,
|
||||||
|
utils.LightKDFFlag,
|
||||||
|
utils.NoUSBFlag,
|
||||||
|
utils.SmartCardDaemonPathFlag,
|
||||||
|
utils.HTTPListenAddrFlag,
|
||||||
|
utils.HTTPVirtualHostsFlag,
|
||||||
|
utils.IPCDisabledFlag,
|
||||||
|
utils.IPCPathFlag,
|
||||||
|
utils.HTTPEnabledFlag,
|
||||||
|
rpcPortFlag,
|
||||||
|
signerSecretFlag,
|
||||||
|
customDBFlag,
|
||||||
|
auditLogFlag,
|
||||||
|
ruleFlag,
|
||||||
|
stdiouiFlag,
|
||||||
|
testFlag,
|
||||||
|
advancedMode,
|
||||||
|
acceptFlag,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
app.Name = "Clef"
|
app.Name = "Clef"
|
||||||
app.Usage = "Manage Ethereum account operations"
|
app.Usage = "Manage Ethereum account operations"
|
||||||
@ -207,11 +258,11 @@ func init() {
|
|||||||
utils.LightKDFFlag,
|
utils.LightKDFFlag,
|
||||||
utils.NoUSBFlag,
|
utils.NoUSBFlag,
|
||||||
utils.SmartCardDaemonPathFlag,
|
utils.SmartCardDaemonPathFlag,
|
||||||
utils.RPCListenAddrFlag,
|
utils.HTTPListenAddrFlag,
|
||||||
utils.RPCVirtualHostsFlag,
|
utils.HTTPVirtualHostsFlag,
|
||||||
utils.IPCDisabledFlag,
|
utils.IPCDisabledFlag,
|
||||||
utils.IPCPathFlag,
|
utils.IPCPathFlag,
|
||||||
utils.RPCEnabledFlag,
|
utils.HTTPEnabledFlag,
|
||||||
rpcPortFlag,
|
rpcPortFlag,
|
||||||
signerSecretFlag,
|
signerSecretFlag,
|
||||||
customDBFlag,
|
customDBFlag,
|
||||||
@ -220,9 +271,50 @@ func init() {
|
|||||||
stdiouiFlag,
|
stdiouiFlag,
|
||||||
testFlag,
|
testFlag,
|
||||||
advancedMode,
|
advancedMode,
|
||||||
|
acceptFlag,
|
||||||
}
|
}
|
||||||
app.Action = signer
|
app.Action = signer
|
||||||
app.Commands = []cli.Command{initCommand, attestCommand, setCredentialCommand, delCredentialCommand, gendocCommand}
|
app.Commands = []cli.Command{initCommand,
|
||||||
|
attestCommand,
|
||||||
|
setCredentialCommand,
|
||||||
|
delCredentialCommand,
|
||||||
|
newAccountCommand,
|
||||||
|
gendocCommand}
|
||||||
|
cli.CommandHelpTemplate = flags.CommandHelpTemplate
|
||||||
|
// Override the default app help template
|
||||||
|
cli.AppHelpTemplate = flags.ClefAppHelpTemplate
|
||||||
|
|
||||||
|
// Override the default app help printer, but only for the global app help
|
||||||
|
originalHelpPrinter := cli.HelpPrinter
|
||||||
|
cli.HelpPrinter = func(w io.Writer, tmpl string, data interface{}) {
|
||||||
|
if tmpl == flags.ClefAppHelpTemplate {
|
||||||
|
// Render out custom usage screen
|
||||||
|
originalHelpPrinter(w, tmpl, flags.HelpData{App: data, FlagGroups: AppHelpFlagGroups})
|
||||||
|
} else if tmpl == flags.CommandHelpTemplate {
|
||||||
|
// Iterate over all command specific flags and categorize them
|
||||||
|
categorized := make(map[string][]cli.Flag)
|
||||||
|
for _, flag := range data.(cli.Command).Flags {
|
||||||
|
if _, ok := categorized[flag.String()]; !ok {
|
||||||
|
categorized[flags.FlagCategory(flag, AppHelpFlagGroups)] = append(categorized[flags.FlagCategory(flag, AppHelpFlagGroups)], flag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// sort to get a stable ordering
|
||||||
|
sorted := make([]flags.FlagGroup, 0, len(categorized))
|
||||||
|
for cat, flgs := range categorized {
|
||||||
|
sorted = append(sorted, flags.FlagGroup{Name: cat, Flags: flgs})
|
||||||
|
}
|
||||||
|
sort.Sort(flags.ByCategory(sorted))
|
||||||
|
|
||||||
|
// add sorted array to data and render with default printer
|
||||||
|
originalHelpPrinter(w, tmpl, map[string]interface{}{
|
||||||
|
"cmd": data,
|
||||||
|
"categorizedFlags": sorted,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
originalHelpPrinter(w, tmpl, data)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
@ -262,7 +354,7 @@ func initializeSecrets(c *cli.Context) error {
|
|||||||
text := "The master seed of clef will be locked with a password.\nPlease specify a password. Do not forget this password!"
|
text := "The master seed of clef will be locked with a password.\nPlease specify a password. Do not forget this password!"
|
||||||
var password string
|
var password string
|
||||||
for {
|
for {
|
||||||
password = getPassPhrase(text, true)
|
password = utils.GetPassPhrase(text, true)
|
||||||
if err := core.ValidatePasswordFormat(password); err != nil {
|
if err := core.ValidatePasswordFormat(password); err != nil {
|
||||||
fmt.Printf("invalid password: %v\n", err)
|
fmt.Printf("invalid password: %v\n", err)
|
||||||
} else {
|
} else {
|
||||||
@ -335,7 +427,7 @@ func setCredential(ctx *cli.Context) error {
|
|||||||
utils.Fatalf("Invalid address specified: %s", addr)
|
utils.Fatalf("Invalid address specified: %s", addr)
|
||||||
}
|
}
|
||||||
address := common.HexToAddress(addr)
|
address := common.HexToAddress(addr)
|
||||||
password := getPassPhrase("Please enter a password to store for this address:", true)
|
password := utils.GetPassPhrase("Please enter a password to store for this address:", true)
|
||||||
fmt.Println()
|
fmt.Println()
|
||||||
|
|
||||||
stretchedKey, err := readMasterKey(ctx, nil)
|
stretchedKey, err := readMasterKey(ctx, nil)
|
||||||
@ -381,14 +473,41 @@ func removeCredential(ctx *cli.Context) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func newAccount(c *cli.Context) error {
|
||||||
|
if err := initialize(c); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// The newaccount is meant for users using the CLI, since 'real' external
|
||||||
|
// UIs can use the UI-api instead. So we'll just use the native CLI UI here.
|
||||||
|
var (
|
||||||
|
ui = core.NewCommandlineUI()
|
||||||
|
pwStorage storage.Storage = &storage.NoStorage{}
|
||||||
|
ksLoc = c.GlobalString(keystoreFlag.Name)
|
||||||
|
lightKdf = c.GlobalBool(utils.LightKDFFlag.Name)
|
||||||
|
)
|
||||||
|
log.Info("Starting clef", "keystore", ksLoc, "light-kdf", lightKdf)
|
||||||
|
am := core.StartClefAccountManager(ksLoc, true, lightKdf, "")
|
||||||
|
// This gives us access to the external API
|
||||||
|
apiImpl := core.NewSignerAPI(am, 0, true, ui, nil, false, pwStorage)
|
||||||
|
// This gives us access to the internal API
|
||||||
|
internalApi := core.NewUIServerAPI(apiImpl)
|
||||||
|
addr, err := internalApi.New(context.Background())
|
||||||
|
if err == nil {
|
||||||
|
fmt.Printf("Generated account %v\n", addr.String())
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
func initialize(c *cli.Context) error {
|
func initialize(c *cli.Context) error {
|
||||||
// Set up the logger to print everything
|
// Set up the logger to print everything
|
||||||
logOutput := os.Stdout
|
logOutput := os.Stdout
|
||||||
if c.GlobalBool(stdiouiFlag.Name) {
|
if c.GlobalBool(stdiouiFlag.Name) {
|
||||||
logOutput = os.Stderr
|
logOutput = os.Stderr
|
||||||
// If using the stdioui, we can't do the 'confirm'-flow
|
// If using the stdioui, we can't do the 'confirm'-flow
|
||||||
fmt.Fprintf(logOutput, legalWarning)
|
if !c.GlobalBool(acceptFlag.Name) {
|
||||||
} else {
|
fmt.Fprint(logOutput, legalWarning)
|
||||||
|
}
|
||||||
|
} else if !c.GlobalBool(acceptFlag.Name) {
|
||||||
if !confirm(legalWarning) {
|
if !confirm(legalWarning) {
|
||||||
return fmt.Errorf("aborted by user")
|
return fmt.Errorf("aborted by user")
|
||||||
}
|
}
|
||||||
@ -456,7 +575,6 @@ func signer(c *cli.Context) error {
|
|||||||
api core.ExternalAPI
|
api core.ExternalAPI
|
||||||
pwStorage storage.Storage = &storage.NoStorage{}
|
pwStorage storage.Storage = &storage.NoStorage{}
|
||||||
)
|
)
|
||||||
|
|
||||||
configDir := c.GlobalString(configdirFlag.Name)
|
configDir := c.GlobalString(configdirFlag.Name)
|
||||||
if stretchedKey, err := readMasterKey(c, ui); err != nil {
|
if stretchedKey, err := readMasterKey(c, ui); err != nil {
|
||||||
log.Warn("Failed to open master, rules disabled", "err", err)
|
log.Warn("Failed to open master, rules disabled", "err", err)
|
||||||
@ -534,22 +652,33 @@ func signer(c *cli.Context) error {
|
|||||||
Service: api,
|
Service: api,
|
||||||
Version: "1.0"},
|
Version: "1.0"},
|
||||||
}
|
}
|
||||||
if c.GlobalBool(utils.RPCEnabledFlag.Name) {
|
if c.GlobalBool(utils.HTTPEnabledFlag.Name) {
|
||||||
vhosts := splitAndTrim(c.GlobalString(utils.RPCVirtualHostsFlag.Name))
|
vhosts := utils.SplitAndTrim(c.GlobalString(utils.HTTPVirtualHostsFlag.Name))
|
||||||
cors := splitAndTrim(c.GlobalString(utils.RPCCORSDomainFlag.Name))
|
cors := utils.SplitAndTrim(c.GlobalString(utils.HTTPCORSDomainFlag.Name))
|
||||||
|
|
||||||
|
srv := rpc.NewServer()
|
||||||
|
err := node.RegisterApis(rpcAPI, []string{"account"}, srv, false)
|
||||||
|
if err != nil {
|
||||||
|
utils.Fatalf("Could not register API: %w", err)
|
||||||
|
}
|
||||||
|
handler := node.NewHTTPHandlerStack(srv, cors, vhosts)
|
||||||
|
|
||||||
|
// set port
|
||||||
|
port := c.Int(rpcPortFlag.Name)
|
||||||
|
|
||||||
// start http server
|
// start http server
|
||||||
httpEndpoint := fmt.Sprintf("%s:%d", c.GlobalString(utils.RPCListenAddrFlag.Name), c.Int(rpcPortFlag.Name))
|
httpEndpoint := fmt.Sprintf("%s:%d", c.GlobalString(utils.HTTPListenAddrFlag.Name), port)
|
||||||
listener, _, err := rpc.StartHTTPEndpoint(httpEndpoint, rpcAPI, []string{"account"}, cors, vhosts, rpc.DefaultHTTPTimeouts)
|
httpServer, addr, err := node.StartHTTPEndpoint(httpEndpoint, rpc.DefaultHTTPTimeouts, handler)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.Fatalf("Could not start RPC api: %v", err)
|
utils.Fatalf("Could not start RPC api: %v", err)
|
||||||
}
|
}
|
||||||
extapiURL = fmt.Sprintf("http://%s", httpEndpoint)
|
extapiURL = fmt.Sprintf("http://%v/", addr)
|
||||||
log.Info("HTTP endpoint opened", "url", extapiURL)
|
log.Info("HTTP endpoint opened", "url", extapiURL)
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
listener.Close()
|
// Don't bother imposing a timeout here.
|
||||||
log.Info("HTTP endpoint closed", "url", httpEndpoint)
|
httpServer.Shutdown(context.Background())
|
||||||
|
log.Info("HTTP endpoint closed", "url", extapiURL)
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
if !c.GlobalBool(utils.IPCDisabledFlag.Name) {
|
if !c.GlobalBool(utils.IPCDisabledFlag.Name) {
|
||||||
@ -579,7 +708,7 @@ func signer(c *cli.Context) error {
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
abortChan := make(chan os.Signal)
|
abortChan := make(chan os.Signal, 1)
|
||||||
signal.Notify(abortChan, os.Interrupt)
|
signal.Notify(abortChan, os.Interrupt)
|
||||||
|
|
||||||
sig := <-abortChan
|
sig := <-abortChan
|
||||||
@ -588,21 +717,11 @@ func signer(c *cli.Context) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// splitAndTrim splits input separated by a comma
|
|
||||||
// and trims excessive white space from the substrings.
|
|
||||||
func splitAndTrim(input string) []string {
|
|
||||||
result := strings.Split(input, ",")
|
|
||||||
for i, r := range result {
|
|
||||||
result[i] = strings.TrimSpace(r)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// DefaultConfigDir is the default config directory to use for the vaults and other
|
// DefaultConfigDir is the default config directory to use for the vaults and other
|
||||||
// persistence requirements.
|
// persistence requirements.
|
||||||
func DefaultConfigDir() string {
|
func DefaultConfigDir() string {
|
||||||
// Try to place the data folder in the user's home dir
|
// Try to place the data folder in the user's home dir
|
||||||
home := homeDir()
|
home := utils.HomeDir()
|
||||||
if home != "" {
|
if home != "" {
|
||||||
if runtime.GOOS == "darwin" {
|
if runtime.GOOS == "darwin" {
|
||||||
return filepath.Join(home, "Library", "Signer")
|
return filepath.Join(home, "Library", "Signer")
|
||||||
@ -610,26 +729,15 @@ func DefaultConfigDir() string {
|
|||||||
appdata := os.Getenv("APPDATA")
|
appdata := os.Getenv("APPDATA")
|
||||||
if appdata != "" {
|
if appdata != "" {
|
||||||
return filepath.Join(appdata, "Signer")
|
return filepath.Join(appdata, "Signer")
|
||||||
} else {
|
|
||||||
return filepath.Join(home, "AppData", "Roaming", "Signer")
|
|
||||||
}
|
}
|
||||||
} else {
|
return filepath.Join(home, "AppData", "Roaming", "Signer")
|
||||||
return filepath.Join(home, ".clef")
|
|
||||||
}
|
}
|
||||||
|
return filepath.Join(home, ".clef")
|
||||||
}
|
}
|
||||||
// As we cannot guess a stable location, return empty and handle later
|
// As we cannot guess a stable location, return empty and handle later
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func homeDir() string {
|
|
||||||
if home := os.Getenv("HOME"); home != "" {
|
|
||||||
return home
|
|
||||||
}
|
|
||||||
if usr, err := user.Current(); err == nil {
|
|
||||||
return usr.HomeDir
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
func readMasterKey(ctx *cli.Context, ui core.UIClientAPI) ([]byte, error) {
|
func readMasterKey(ctx *cli.Context, ui core.UIClientAPI) ([]byte, error) {
|
||||||
var (
|
var (
|
||||||
file string
|
file string
|
||||||
@ -659,7 +767,7 @@ func readMasterKey(ctx *cli.Context, ui core.UIClientAPI) ([]byte, error) {
|
|||||||
}
|
}
|
||||||
password = resp.Text
|
password = resp.Text
|
||||||
} else {
|
} else {
|
||||||
password = getPassPhrase("Decrypt master seed of clef", false)
|
password = utils.GetPassPhrase("Decrypt master seed of clef", false)
|
||||||
}
|
}
|
||||||
masterSeed, err := decryptSeed(cipherKey, password)
|
masterSeed, err := decryptSeed(cipherKey, password)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -679,14 +787,16 @@ func readMasterKey(ctx *cli.Context, ui core.UIClientAPI) ([]byte, error) {
|
|||||||
|
|
||||||
// checkFile is a convenience function to check if a file
|
// checkFile is a convenience function to check if a file
|
||||||
// * exists
|
// * exists
|
||||||
// * is mode 0400
|
// * is mode 0400 (unix only)
|
||||||
func checkFile(filename string) error {
|
func checkFile(filename string) error {
|
||||||
info, err := os.Stat(filename)
|
info, err := os.Stat(filename)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed stat on %s: %v", filename, err)
|
return fmt.Errorf("failed stat on %s: %v", filename, err)
|
||||||
}
|
}
|
||||||
// Check the unix permission bits
|
// Check the unix permission bits
|
||||||
if info.Mode().Perm()&0377 != 0 {
|
// However, on windows, we cannot use the unix perm-bits, see
|
||||||
|
// https://github.com/ethereum/go-ethereum/issues/20123
|
||||||
|
if runtime.GOOS != "windows" && info.Mode().Perm()&0377 != 0 {
|
||||||
return fmt.Errorf("file (%v) has insecure file permissions (%v)", filename, info.Mode().String())
|
return fmt.Errorf("file (%v) has insecure file permissions (%v)", filename, info.Mode().String())
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
@ -694,7 +804,7 @@ func checkFile(filename string) error {
|
|||||||
|
|
||||||
// confirm displays a text and asks for user confirmation
|
// confirm displays a text and asks for user confirmation
|
||||||
func confirm(text string) bool {
|
func confirm(text string) bool {
|
||||||
fmt.Printf(text)
|
fmt.Print(text)
|
||||||
fmt.Printf("\nEnter 'ok' to proceed:\n> ")
|
fmt.Printf("\nEnter 'ok' to proceed:\n> ")
|
||||||
|
|
||||||
text, err := bufio.NewReader(os.Stdin).ReadString('\n')
|
text, err := bufio.NewReader(os.Stdin).ReadString('\n')
|
||||||
@ -760,21 +870,19 @@ func testExternalUI(api *core.SignerAPI) {
|
|||||||
api.UI.ShowInfo("Please approve the next request for signing a clique header")
|
api.UI.ShowInfo("Please approve the next request for signing a clique header")
|
||||||
time.Sleep(delay)
|
time.Sleep(delay)
|
||||||
cliqueHeader := types.Header{
|
cliqueHeader := types.Header{
|
||||||
common.HexToHash("0000H45H"),
|
ParentHash: common.HexToHash("0000H45H"),
|
||||||
common.HexToHash("0000H45H"),
|
UncleHash: common.HexToHash("0000H45H"),
|
||||||
common.HexToAddress("0000H45H"),
|
Coinbase: common.HexToAddress("0000H45H"),
|
||||||
common.HexToHash("0000H00H"),
|
Root: common.HexToHash("0000H00H"),
|
||||||
common.HexToHash("0000H45H"),
|
TxHash: common.HexToHash("0000H45H"),
|
||||||
common.HexToHash("0000H45H"),
|
ReceiptHash: common.HexToHash("0000H45H"),
|
||||||
types.Bloom{},
|
Difficulty: big.NewInt(1337),
|
||||||
big.NewInt(1337),
|
Number: big.NewInt(1337),
|
||||||
big.NewInt(1337),
|
GasLimit: 1338,
|
||||||
1338,
|
GasUsed: 1338,
|
||||||
1338,
|
Time: 1338,
|
||||||
1338,
|
Extra: []byte("Extra data Extra data Extra data Extra data Extra data Extra data Extra data Extra data"),
|
||||||
[]byte("Extra data Extra data Extra data Extra data Extra data Extra data Extra data Extra data"),
|
MixDigest: common.HexToHash("0x0000H45H"),
|
||||||
common.HexToHash("0x0000H45H"),
|
|
||||||
types.BlockNonce{},
|
|
||||||
}
|
}
|
||||||
cliqueRlp, err := rlp.EncodeToBytes(cliqueHeader)
|
cliqueRlp, err := rlp.EncodeToBytes(cliqueHeader)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -815,13 +923,13 @@ func testExternalUI(api *core.SignerAPI) {
|
|||||||
time.Sleep(delay)
|
time.Sleep(delay)
|
||||||
data := hexutil.Bytes([]byte{})
|
data := hexutil.Bytes([]byte{})
|
||||||
to := common.NewMixedcaseAddress(a)
|
to := common.NewMixedcaseAddress(a)
|
||||||
tx := core.SendTxArgs{
|
tx := apitypes.SendTxArgs{
|
||||||
Data: &data,
|
Data: &data,
|
||||||
Nonce: 0x1,
|
Nonce: 0x1,
|
||||||
Value: hexutil.Big(*big.NewInt(6)),
|
Value: hexutil.Big(*big.NewInt(6)),
|
||||||
From: common.NewMixedcaseAddress(a),
|
From: common.NewMixedcaseAddress(a),
|
||||||
To: &to,
|
To: &to,
|
||||||
GasPrice: hexutil.Big(*big.NewInt(5)),
|
GasPrice: (*hexutil.Big)(big.NewInt(5)),
|
||||||
Gas: 1000,
|
Gas: 1000,
|
||||||
Input: nil,
|
Input: nil,
|
||||||
}
|
}
|
||||||
@ -856,27 +964,6 @@ func testExternalUI(api *core.SignerAPI) {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// getPassPhrase retrieves the password associated with clef, either fetched
|
|
||||||
// from a list of preloaded passphrases, or requested interactively from the user.
|
|
||||||
// TODO: there are many `getPassPhrase` functions, it will be better to abstract them into one.
|
|
||||||
func getPassPhrase(prompt string, confirmation bool) string {
|
|
||||||
fmt.Println(prompt)
|
|
||||||
password, err := console.Stdin.PromptPassword("Password: ")
|
|
||||||
if err != nil {
|
|
||||||
utils.Fatalf("Failed to read password: %v", err)
|
|
||||||
}
|
|
||||||
if confirmation {
|
|
||||||
confirm, err := console.Stdin.PromptPassword("Repeat password: ")
|
|
||||||
if err != nil {
|
|
||||||
utils.Fatalf("Failed to read password confirmation: %v", err)
|
|
||||||
}
|
|
||||||
if password != confirm {
|
|
||||||
utils.Fatalf("Passwords do not match")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return password
|
|
||||||
}
|
|
||||||
|
|
||||||
type encryptedSeedStorage struct {
|
type encryptedSeedStorage struct {
|
||||||
Description string `json:"description"`
|
Description string `json:"description"`
|
||||||
Version int `json:"version"`
|
Version int `json:"version"`
|
||||||
@@ -927,7 +1014,7 @@ func GenDoc(ctx *cli.Context) {
 		if data, err := json.MarshalIndent(v, "", " "); err == nil {
 			output = append(output, fmt.Sprintf("### %s\n\n%s\n\nExample:\n```json\n%s\n```", name, desc, data))
 		} else {
-			log.Error("Error generating output", err)
+			log.Error("Error generating output", "err", err)
 		}
 	}
 )
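The log.Error fix above is about geth's structured logger: arguments after the message are consumed as alternating key/value context pairs, so a bare err leaves the logger with a dangling value instead of an err=... field. A minimal sketch:

package main

import (
	"errors"

	"github.com/ethereum/go-ethereum/log"
)

func main() {
	err := errors.New("boom") // stand-in error, illustrative only

	// Context is passed as alternating key/value pairs after the message.
	// The old call, log.Error("Error generating output", err), supplied a
	// value without a key, so the logger could not render it as a proper
	// err=... field; the fixed call names the key explicitly.
	log.Error("Error generating output", "err", err)
}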
@@ -938,7 +1025,7 @@ func GenDoc(ctx *cli.Context) {
 			"of the work in canonicalizing and making sense of the data, and it's up to the UI to present" +
 			"the user with the contents of the `message`"
 		sighash, msg := accounts.TextAndHash([]byte("hello world"))
-		messages := []*core.NameValueType{{"message", msg, accounts.MimetypeTextPlain}}
+		messages := []*core.NameValueType{{Name: "message", Value: msg, Typ: accounts.MimetypeTextPlain}}
 
 		add("SignDataRequest", desc, &core.SignDataRequest{
 			Address: common.NewMixedcaseAddress(a),
@@ -968,17 +1055,17 @@ func GenDoc(ctx *cli.Context) {
 		data := hexutil.Bytes([]byte{0x01, 0x02, 0x03, 0x04})
 		add("SignTxRequest", desc, &core.SignTxRequest{
 			Meta: meta,
-			Callinfo: []core.ValidationInfo{
-				{"Warning", "Something looks odd, show this message as a warning"},
-				{"Info", "User should see this aswell"},
+			Callinfo: []apitypes.ValidationInfo{
+				{Typ: "Warning", Message: "Something looks odd, show this message as a warning"},
+				{Typ: "Info", Message: "User should see this as well"},
 			},
-			Transaction: core.SendTxArgs{
+			Transaction: apitypes.SendTxArgs{
 				Data:     &data,
 				Nonce:    0x1,
 				Value:    hexutil.Big(*big.NewInt(6)),
 				From:     common.NewMixedcaseAddress(a),
 				To:       nil,
-				GasPrice: hexutil.Big(*big.NewInt(5)),
+				GasPrice: (*hexutil.Big)(big.NewInt(5)),
 				Gas:      1000,
 				Input:    nil,
 			}})
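In this hunk the Callinfo and Transaction types move from the core package to signer/core/apitypes, the composite literals gain field names, and the "aswell" typo in an example message is fixed. A hedged sketch of building an apitypes.SendTxArgs for an EIP-1559 style request follows; the MaxFeePerGas/MaxPriorityFeePerGas field names are assumed from the JSON keys in the new testdata fixtures further down, so treat this as a sketch rather than an API reference:

package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/ethereum/go-ethereum/signer/core/apitypes"
)

func main() {
	to := common.NewMixedcaseAddress(common.HexToAddress("0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192"))

	// MaxFeePerGas / MaxPriorityFeePerGas are assumed field names, taken from
	// the JSON keys in the new testdata fixtures; the rest mirrors the fields
	// used in the hunk above.
	args := apitypes.SendTxArgs{
		From:                 to,
		To:                   &to,
		Gas:                  0x333,
		MaxFeePerGas:         (*hexutil.Big)(big.NewInt(0x123)),
		MaxPriorityFeePerGas: (*hexutil.Big)(big.NewInt(0x123)),
		Value:                hexutil.Big(*big.NewInt(0x10)),
		Nonce:                0x0,
	}
	fmt.Printf("%+v\n", args)
}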
@@ -988,13 +1075,13 @@ func GenDoc(ctx *cli.Context) {
 		add("SignTxResponse - approve", "Response to request to sign a transaction. This response needs to contain the `transaction`"+
 			", because the UI is free to make modifications to the transaction.",
 			&core.SignTxResponse{Approved: true,
-				Transaction: core.SendTxArgs{
+				Transaction: apitypes.SendTxArgs{
 					Data:     &data,
 					Nonce:    0x4,
 					Value:    hexutil.Big(*big.NewInt(6)),
 					From:     common.NewMixedcaseAddress(a),
 					To:       nil,
-					GasPrice: hexutil.Big(*big.NewInt(5)),
+					GasPrice: (*hexutil.Big)(big.NewInt(5)),
 					Gas:      1000,
 					Input:    nil,
 				}})
@@ -1019,7 +1106,7 @@ func GenDoc(ctx *cli.Context) {
 
 		rlpdata := common.FromHex("0xf85d640101948a8eafb1cf62bfbeb1741769dae1a9dd47996192018026a0716bd90515acb1e68e5ac5867aa11a1e65399c3349d479f5fb698554ebc6f293a04e8a4ebfff434e971e0ef12c5bf3a881b06fd04fc3f8b8a7291fb67a26a1d4ed")
 		var tx types.Transaction
-		rlp.DecodeBytes(rlpdata, &tx)
+		tx.UnmarshalBinary(rlpdata)
 		add("OnApproved - SignTransactionResult", desc, &ethapi.SignTransactionResult{Raw: rlpdata, Tx: &tx})
 
 	}
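Replacing rlp.DecodeBytes(rlpdata, &tx) with tx.UnmarshalBinary(rlpdata) switches to the transaction's canonical decoder, which understands typed (EIP-2718) envelopes such as EIP-1559 transactions as well as legacy RLP, and returns an error worth checking. A small sketch decoding the same raw payload:

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
)

func main() {
	// Same raw transaction as in the hunk above (a legacy, RLP-encoded tx).
	rlpdata := common.FromHex("0xf85d640101948a8eafb1cf62bfbeb1741769dae1a9dd47996192018026a0716bd90515acb1e68e5ac5867aa11a1e65399c3349d479f5fb698554ebc6f293a04e8a4ebfff434e971e0ef12c5bf3a881b06fd04fc3f8b8a7291fb67a26a1d4ed")

	var tx types.Transaction
	// UnmarshalBinary accepts the canonical encoding, i.e. both legacy RLP
	// transactions and typed (EIP-2718) envelopes such as EIP-1559 payloads.
	if err := tx.UnmarshalBinary(rlpdata); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println("nonce:", tx.Nonce(), "gas:", tx.Gas())
}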
@@ -1036,16 +1123,21 @@ func GenDoc(ctx *cli.Context) {
 			&core.ListRequest{
 				Meta: meta,
 				Accounts: []accounts.Account{
-					{a, accounts.URL{Scheme: "keystore", Path: "/path/to/keyfile/a"}},
-					{b, accounts.URL{Scheme: "keystore", Path: "/path/to/keyfile/b"}}},
+					{Address: a, URL: accounts.URL{Scheme: "keystore", Path: "/path/to/keyfile/a"}},
+					{Address: b, URL: accounts.URL{Scheme: "keystore", Path: "/path/to/keyfile/b"}}},
 			})
 
 		add("ListResponse", "Response to list request. The response contains a list of all addresses to show to the caller. "+
 			"Note: the UI is free to respond with any address the caller, regardless of whether it exists or not",
 			&core.ListResponse{
 				Accounts: []accounts.Account{
-					{common.HexToAddress("0xcowbeef000000cowbeef00000000000000000c0w"), accounts.URL{Path: ".. ignored .."}},
-					{common.HexToAddress("0xffffffffffffffffffffffffffffffffffffffff"), accounts.URL{}},
+					{
+						Address: common.HexToAddress("0xcowbeef000000cowbeef00000000000000000c0w"),
+						URL:     accounts.URL{Path: ".. ignored .."},
+					},
+					{
+						Address: common.HexToAddress("0xffffffffffffffffffffffffffffffffffffffff"),
+					},
 				}})
 	}
 
Binary image file changed in this diff; not shown (20 KiB before and after).
cmd/clef/testdata/sign_1559_missing_field_exp_fail.json (new vendored file, 16 lines)
@@ -0,0 +1,16 @@
+{
+  "jsonrpc": "2.0",
+  "method": "account_signTransaction",
+  "params": [
+    {
+      "from": "0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192",
+      "to": "0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192",
+      "gas": "0x333",
+      "maxFeePerGas": "0x123",
+      "nonce": "0x0",
+      "value": "0x10",
+      "data": "0x4401a6e40000000000000000000000000000000000000000000000000000000000000012"
+    }
+  ],
+  "id": 67
+}
cmd/clef/testdata/sign_1559_missing_maxfeepergas_exp_fail.json (new vendored file, 16 lines)
@@ -0,0 +1,16 @@
+{
+  "jsonrpc": "2.0",
+  "method": "account_signTransaction",
+  "params": [
+    {
+      "from": "0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192",
+      "to": "0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192",
+      "gas": "0x333",
+      "maxPriorityFeePerGas": "0x123",
+      "nonce": "0x0",
+      "value": "0x10",
+      "data": "0x4401a6e40000000000000000000000000000000000000000000000000000000000000012"
+    }
+  ],
+  "id": 67
+}
cmd/clef/testdata/sign_1559_tx.json (new vendored file, 17 lines)
@@ -0,0 +1,17 @@
+{
+  "jsonrpc": "2.0",
+  "method": "account_signTransaction",
+  "params": [
+    {
+      "from": "0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192",
+      "to": "0x8A8eAFb1cf62BfBeb1741769DAE1a9dd47996192",
+      "gas": "0x333",
+      "maxPriorityFeePerGas": "0x123",
+      "maxFeePerGas": "0x123",
+      "nonce": "0x0",
+      "value": "0x10",
+      "data": "0x4401a6e40000000000000000000000000000000000000000000000000000000000000012"
+    }
+  ],
+  "id": 67
+}
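The sign_1559_*.json fixtures above are plain JSON-RPC requests against clef's external account_signTransaction API: the two *_exp_fail.json variants each omit one of the EIP-1559 fee fields and are expected to be rejected, while sign_1559_tx.json carries both maxFeePerGas and maxPriorityFeePerGas. As a rough sketch of exercising such a fixture by hand, the snippet below posts one of them to a locally running clef, assuming its HTTP JSON-RPC interface is enabled on the customary default port 8550 (adjust the URL and file path to your setup):

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Assumes clef is running with its HTTP JSON-RPC interface enabled and
	// listening on localhost:8550; both the port and the file path are
	// assumptions for this sketch.
	payload, err := os.ReadFile("cmd/clef/testdata/sign_1559_tx.json")
	if err != nil {
		panic(err)
	}
	resp, err := http.Post("http://localhost:8550", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// For the *_exp_fail.json fixtures the response should be a JSON-RPC
	// error; for sign_1559_tx.json clef will first prompt for approval.
	fmt.Println(string(body))
}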
cmd/clef/testdata/sign_bad_checksum_exp_fail.json (new vendored file, 17 lines)
@@ -0,0 +1,17 @@
+{
+  "jsonrpc": "2.0",
+  "method": "account_signTransaction",
+  "params": [
+    {
+      "from":"0x8a8eafb1cf62bfbeb1741769dae1a9dd47996192",
+      "to":"0x8a8eafb1cf62bfbeb1741769dae1a9dd47996192",
+      "gas": "0x333",
+      "gasPrice": "0x123",
+      "nonce": "0x0",
+      "value": "0x10",
+      "data":
+        "0x4401a6e40000000000000000000000000000000000000000000000000000000000000012"
+    }
+  ],
+  "id": 67
+}
Some files were not shown because too many files have changed in this diff.