Compare commits
568 Commits
Author | SHA1 | Date | |
---|---|---|---|
5f55d95aea | |||
c2eea6306e | |||
1d6b65cd84 | |||
1b2941cd56 | |||
b8c0883770 | |||
14bad7e212 | |||
8c20fe17bd | |||
a0cc73a27a | |||
682c4531af | |||
5c3051e6fa | |||
3dd46bc884 | |||
e44d50fb52 | |||
5d9ea439b3 | |||
d0668838b9 | |||
da776556d0 | |||
f2e8759d10 | |||
98095efe88 | |||
b7e3dfc5a2 | |||
3e1dbc3ca7 | |||
adb065a328 | |||
c793cb3385 | |||
3eef19598e | |||
f4aebd4c8d | |||
98be7cd833 | |||
eaf706b73c | |||
b170a80cdc | |||
aefffc9ed8 | |||
f31a3a251a | |||
a9c94cbf48 | |||
667a386d87 | |||
d2089e46f8 | |||
be29e41334 | |||
47965930a1 | |||
bc6c4a337c | |||
f7fdfa4eac | |||
0405f728c6 | |||
63c5a46b82 | |||
c89fa789b7 | |||
39f1d909d1 | |||
71b577f839 | |||
a93d63d576 | |||
7fb72dbcbf | |||
688fbab5d5 | |||
0f036f6209 | |||
71a89b7c75 | |||
ecb8e23e88 | |||
058c5fe960 | |||
a29bdf547c | |||
44b912ec64 | |||
3d69970c15 | |||
8b90a49f3d | |||
c046126c87 | |||
cd134178f7 | |||
4918c820c6 | |||
5904d58a96 | |||
7c90a2e42e | |||
c39de61a0a | |||
af53767e16 | |||
7632acf6b4 | |||
9ccb70da7b | |||
8fefee7132 | |||
7a4073a758 | |||
ab522d3bc7 | |||
c45c424073 | |||
8d2775e3d7 | |||
170036289b | |||
8ebbd9b7c7 | |||
7df36e5ec1 | |||
5fb29fd45f | |||
90beb6112e | |||
efa2b3da7e | |||
e3b3c298df | |||
3752507a11 | |||
a269a713d6 | |||
27df30f30f | |||
311f5a0ed1 | |||
68ae6b52e9 | |||
1776c717bf | |||
0f6e3e873a | |||
7a7a5acc9f | |||
66d74dfb75 | |||
b950a2977c | |||
8ea3c88e44 | |||
94ad694a26 | |||
4c6953606e | |||
fc0638f9d8 | |||
4b11f207cb | |||
7e5c49cafa | |||
efcfa2209b | |||
aa18aad7ad | |||
594328c112 | |||
2e6b9c141b | |||
b38cea6654 | |||
dd083aa34e | |||
f213a9d8e8 | |||
6826b2040f | |||
2a3657d8d9 | |||
566af6ef92 | |||
290e851f57 | |||
8f96d66241 | |||
5782164a35 | |||
27f657478f | |||
756b62988c | |||
f61e203c10 | |||
56ed6152a1 | |||
dc7f202ecd | |||
1d42061e2c | |||
b6135a72dd | |||
7d59c5c58d | |||
8aa4597c9e | |||
57ba1824ac | |||
c89f4352d0 | |||
6a00a3ade1 | |||
f821b0188a | |||
d79f2f2656 | |||
130bccc763 | |||
ae9ed5c420 | |||
a1c201a5ac | |||
844e911129 | |||
4b9de75623 | |||
2d7d7ef2fe | |||
14d5033c9d | |||
d52a693f80 | |||
258cc73ea9 | |||
79b7b5eeaa | |||
b4fbcd5060 | |||
488528e9e4 | |||
8110671960 | |||
32bb280179 | |||
4536b993ff | |||
586eddfd09 | |||
d46da273c6 | |||
ecd7199c43 | |||
1c20313a6a | |||
cfa999f006 | |||
c74a575725 | |||
9672a62a38 | |||
572da73d4d | |||
0f722df2d9 | |||
1b77d5090d | |||
e62c2aeb1b | |||
4880868c88 | |||
c3d5250473 | |||
e0dc45fce2 | |||
48cc36ce83 | |||
123aa659e4 | |||
cdcbb2f160 | |||
db62979514 | |||
5137c04ccf | |||
a20d3fc362 | |||
3d6d828caf | |||
6a543607ef | |||
c1a4dcfc87 | |||
70b8b54cd2 | |||
c88c89fd9e | |||
b06f44ecc2 | |||
87ae0df476 | |||
5127ec10cb | |||
18580e152c | |||
8241fa5227 | |||
a6ca8fd268 | |||
27116bd46c | |||
a40e61b4ac | |||
16d10aae0c | |||
e728aaca72 | |||
e581f2690a | |||
6197fbf8d7 | |||
5c17b2f521 | |||
3a5bdef962 | |||
bf5ae502ef | |||
6fdd0893c3 | |||
68c755a238 | |||
ebf3cf8f7d | |||
24cdac41f3 | |||
fdc5a7dc1a | |||
d04a2e7557 | |||
728ad6f47d | |||
499d63f706 | |||
5f917715c5 | |||
529897ea2b | |||
53016c1225 | |||
bbc77f488e | |||
e50e3bea49 | |||
bea56d84e5 | |||
bcd8aeefdd | |||
05e257c22c | |||
f08680985a | |||
5542b51b50 | |||
b34b130fb5 | |||
fd36448d6a | |||
9d81f4fdd1 | |||
4e85be0717 | |||
f460b0217f | |||
a1f1c404c3 | |||
1e9b504ee7 | |||
33e4f51749 | |||
6498df7b02 | |||
46df50be18 | |||
91aaddaeb3 | |||
ea005a0295 | |||
aca9d6a1fb | |||
549f1add29 | |||
6f1ca0bc91 | |||
a9f26dcd0d | |||
ef63e9af55 | |||
ee1682ffe6 | |||
6cb08d8328 | |||
46e8940b19 | |||
2dc20963e7 | |||
4e6d8b348d | |||
85e6c40c00 | |||
dff9b4246f | |||
aceaaa5178 | |||
9027981280 | |||
83877a0f9d | |||
8627680e24 | |||
aa9fff3e68 | |||
934f587bd5 | |||
e456451a89 | |||
bf1f620343 | |||
701ac1ce6c | |||
3c5329599c | |||
8abf06d378 | |||
a7161caedf | |||
4b2492b614 | |||
4081868452 | |||
18490d833c | |||
fc4fffd5ac | |||
7e02105672 | |||
ed92f116f7 | |||
6a185531d2 | |||
8d8e2248b2 | |||
a306e17a26 | |||
968d8ffe94 | |||
022cbd6800 | |||
467bb7a719 | |||
e9254bb0f9 | |||
3a2da31c3e | |||
52dc7cb452 | |||
f6f7e7a870 | |||
c90fc3503d | |||
659c0cb9e8 | |||
96c7c39ae4 | |||
cf842b3fe5 | |||
78b70d79ec | |||
f7328c5ecb | |||
fb578f4550 | |||
8b2aca96c5 | |||
bbeaab7e64 | |||
be44651523 | |||
c58079461b | |||
d63e29241d | |||
4097478884 | |||
1f3596c25a | |||
9055c16efa | |||
f0cbebb19f | |||
916fe11241 | |||
10d3466c93 | |||
db739b506a | |||
4326061e35 | |||
f115f077d7 | |||
6c670eff01 | |||
4ab593c5a1 | |||
c2bbff6116 | |||
787d688c2f | |||
b813e4d411 | |||
798e4fb4ed | |||
9b4a45f6e8 | |||
cfb3a8ea8f | |||
73308dbe0e | |||
86cfc22c79 | |||
72826bb5ad | |||
75c86f8646 | |||
0cfa21fc7f | |||
5f92606be2 | |||
3601320ccd | |||
14013372ae | |||
9866f19d6a | |||
6f30034413 | |||
77d21e472d | |||
a7bae3b2a6 | |||
342ae7ce7d | |||
b3b110bc95 | |||
138e7af96c | |||
8b6ae6bf86 | |||
270ea6eec3 | |||
0228fb57cd | |||
2855a93ede | |||
b5cee9738b | |||
434e4b31d8 | |||
06fe6310a3 | |||
066d301520 | |||
bbbe2360d0 | |||
b7bb2d8589 | |||
c7727191ae | |||
fe45210c55 | |||
e189fb839c | |||
42c049e519 | |||
a0e2e22a4f | |||
b8f4a48ada | |||
64c8e2f2ca | |||
c1343c8872 | |||
cafdd5931e | |||
8a3ce5450a | |||
08759b0aaf | |||
558d18d2b0 | |||
c3a4874e5e | |||
9ff07304a3 | |||
daad2b2559 | |||
850f41b374 | |||
8b58cd0190 | |||
73d21ea6af | |||
bff9ceb6b8 | |||
6d3cd03a03 | |||
e3f2b541f2 | |||
ea19e61fba | |||
edd7aa054c | |||
e90958cd29 | |||
ba3fb9e6f4 | |||
05c86c2c9f | |||
31b4ff8cc1 | |||
119b724362 | |||
0fd251c7f7 | |||
848e50d6fb | |||
bc63a3d282 | |||
de1831b6e9 | |||
627c2311fb | |||
d45f01d5f7 | |||
ae4982a365 | |||
41f35d7913 | |||
5adc461d58 | |||
789b9a9f16 | |||
c0bf321ec8 | |||
e8e6df5159 | |||
8255afbc75 | |||
f30b809f00 | |||
7d598af493 | |||
0d89e6c2f1 | |||
7cbcae3fac | |||
8ce04ae8e7 | |||
ab92678fb3 | |||
b1c1d09f83 | |||
64ee5763ee | |||
b534106cc8 | |||
cec92f5940 | |||
4e7abcff30 | |||
f954a8b666 | |||
85865a51b6 | |||
26e72b2ccd | |||
a6903ad6a5 | |||
45d08a8aa7 | |||
0969b35eec | |||
cda91ee180 | |||
66b148dd8f | |||
483feb0d3f | |||
0a5ee08e2b | |||
1415669ac3 | |||
1e62cd6c79 | |||
ac954f48bd | |||
b1908f6a16 | |||
f1ddb1a7ad | |||
7841f0cc09 | |||
93c0012000 | |||
b57a3f154f | |||
1f281dcaab | |||
3dca9cc550 | |||
139f6a0f4c | |||
8f65444bf3 | |||
53ce0a76d8 | |||
e6af65d02a | |||
fbf3b2ede2 | |||
3d971c5a34 | |||
0c6665558a | |||
436fc8d76a | |||
c20d6e5e4e | |||
5387ad760f | |||
e4b138a593 | |||
0a1da69fac | |||
c616391df2 | |||
f8d98f7fcd | |||
c305005d83 | |||
6777531a2d | |||
17649edd85 | |||
7d8155714b | |||
ee1debda53 | |||
bb07ce3eed | |||
216729009b | |||
5b283663b4 | |||
b6d88a0e9f | |||
4f4d2b6474 | |||
371871d685 | |||
aa36a6ae4f | |||
b8d59d9c98 | |||
151c7bef41 | |||
5a057a8ded | |||
7e29b0b5b4 | |||
0ff2adb21b | |||
6ba7bbbe29 | |||
ae5bc89cad | |||
3ab1fb0215 | |||
4f28c5b69d | |||
6fe917ecb8 | |||
cb85923828 | |||
770b29fd80 | |||
987c1a595a | |||
fdb936ee95 | |||
2abf1a36b9 | |||
4063d30b5e | |||
2680e23b15 | |||
d6c6bcc9f3 | |||
b019f3ee29 | |||
b05e472c07 | |||
6cace73bea | |||
ecc876cec0 | |||
537774e049 | |||
1cc4bd76db | |||
725f2a4cf7 | |||
59cd60b266 | |||
be79b4aacb | |||
dda5af0e6c | |||
df75dbfd68 | |||
900e124bee | |||
8b5b635db7 | |||
f1ec226d80 | |||
856b9e9c50 | |||
5fb68f4b39 | |||
707ac67bab | |||
68dda34905 | |||
7486904b92 | |||
7d2d141b34 | |||
cf47ef12d3 | |||
a13bc9d7a1 | |||
ba7c125153 | |||
2128289631 | |||
6b939fbeaa | |||
188ab928c3 | |||
3274db19c7 | |||
a50bccc642 | |||
fbbedb4c03 | |||
5da7ec7c18 | |||
a8fd0de0d3 | |||
2e2f093ec2 | |||
da645f007b | |||
72c255caef | |||
db056eeafe | |||
9aa7158057 | |||
1894c9239f | |||
bddf8f76c8 | |||
15780ead07 | |||
6005dcef5b | |||
d3642b0715 | |||
f85212aa86 | |||
d951ff300e | |||
0781fd7aa0 | |||
aed25640a4 | |||
f9b2fd79e8 | |||
528dcc3814 | |||
ae1a137ce7 | |||
3750d835a1 | |||
e287b56b69 | |||
a411fe7e6e | |||
5728dd381d | |||
63e76482ac | |||
19b2640e89 | |||
f2ab351e8d | |||
2871781f64 | |||
6e6931ef73 | |||
a2dc074b1d | |||
896c134d30 | |||
e4d794851b | |||
0edcbc797f | |||
a15b02320e | |||
78f1964095 | |||
7b11e94441 | |||
f3d4ce0d16 | |||
886478b18b | |||
e7f6798b59 | |||
cdd34fcb16 | |||
6e7620afe0 | |||
5945a33350 | |||
2d5d6d9d0e | |||
752c75fb21 | |||
1b8566a7b1 | |||
e5b480b638 | |||
0ab8a175d8 | |||
e2fdd33541 | |||
32226f1b0c | |||
a7fc4fa9b7 | |||
649787a9bf | |||
9e011ff1cd | |||
36137623ed | |||
1f39746886 | |||
dbbcf558e2 | |||
4811409e99 | |||
3c9a2c752f | |||
fd69d2b7a8 | |||
e6fb69296e | |||
1b89bd5d26 | |||
9be5d5cd90 | |||
2baf1de00d | |||
b3f1f4c673 | |||
6c41e675ec | |||
04c6369a09 | |||
d1f507b7f1 | |||
ef422ee1e1 | |||
82a024d425 | |||
24f856ad6b | |||
e640861704 | |||
e51eeb8104 | |||
f3aac71fad | |||
975c591261 | |||
b9aedeab0b | |||
9c636d2490 | |||
d8370a4e15 | |||
fa187a366d | |||
eae81465c1 | |||
03f090b30b | |||
787d71d659 | |||
b2ffb76ca2 | |||
0e4deeb6e7 | |||
38ff815485 | |||
659d6b9b7c | |||
032453b3e7 | |||
2a3322ea14 | |||
1cecda8333 | |||
4dee2000a3 | |||
fece1fa9be | |||
02356b36d6 | |||
6d3a924283 | |||
9acec62cf8 | |||
abb53644c6 | |||
3eebeae363 | |||
98d68cfa91 | |||
888e7bc765 | |||
8db9d44ca9 | |||
2d4c228933 | |||
96d86740a1 | |||
23031b1554 | |||
2e43414c79 | |||
9901a40f04 | |||
d648e96b3d | |||
391ca61678 | |||
f801ec78ce | |||
61ca14bc44 | |||
52904ae32f | |||
f4a6470a7b | |||
c8ad64f33c | |||
27a50c8f4b | |||
1ead43c88c | |||
66d47ced48 | |||
9af4065243 | |||
a0d203a874 | |||
18ea468cf8 | |||
7dde2b902c | |||
3e1000fda3 | |||
1e806c4c77 | |||
8a44451edf | |||
9e1d9bff3b | |||
ffe58bf5ab | |||
de75d542f3 | |||
b9db5b37f2 | |||
b1e0143444 | |||
168d0e9e45 | |||
a0bf2ea7e7 | |||
5f0a4416db | |||
83a22b457c | |||
2c42e54519 | |||
f27e826b14 |
CONTRIBUTING.md → .github/CONTRIBUTING.md (vendored, 11 changed lines)

@@ -1,5 +1,14 @@

## Can I have feature X

Before you do a feature request please check and make sure that it isn't possible
through some other means. The JavaScript enabled console is a powerful feature
in the right hands. Please check our [Bitchin' tricks](https://github.com/ethereum/go-ethereum/wiki/bitchin-tricks) wiki page for more info
and help.

## Contributing

If you'd like to contribute to go-ethereum please fork, fix, commit and
send a pull request. Commits who do not comply with the coding standards
send a pull request. Commits which do not comply with the coding standards
are ignored (use gofmt!). If you send pull requests make absolute sure that you
commit on the `develop` branch and that you do not merge to master.
Commits that are directly based on master are simply ignored.
.github/ISSUE_TEMPLATE.md (new file, vendored, 20 lines)

@@ -0,0 +1,20 @@

#### System information

Geth version: `geth version`
OS & Version: Windows/Linux/OSX
Commit hash : (if `develop`)

#### Expected behaviour

#### Actual behaviour

#### Steps to reproduce the behaviour

#### Backtrace

````
[backtrace]
````
.gitignore (vendored, 2 changed lines)

@@ -35,3 +35,5 @@ cmd/mist/assets/ext/ethereum.js/

profile.tmp
profile.cov

# vagrant
.vagrant
.mailmap (4 changed lines)

@@ -59,3 +59,7 @@ Jason Carver <jacarver@linkedin.com> <ut96caarrs@snkmail.com>

Joseph Chow <ethereum@outlook.com>
Joseph Chow <ethereum@outlook.com> ethers <TODO>

Enrique Fynn <enriquefynn@gmail.com>

Vincent G <caktux@gmail.com>
.travis.yml (12 changed lines)

@@ -1,10 +1,12 @@

language: go
go:
- 1.4.2
- 1.5.4
- 1.6.2
install:
# - go get code.google.com/p/go.tools/cmd/goimports
# - go get github.com/golang/lint/golint
# - go get golang.org/x/tools/cmd/vet
# - go get golang.org/x/tools/cmd/vet
- go get golang.org/x/tools/cmd/cover
before_script:
# - gofmt -l -w .

@@ -20,14 +22,10 @@ env:

global:
- secure: "U2U1AmkU4NJBgKR/uUAebQY87cNL0+1JHjnLOmmXwxYYyj5ralWb1aSuSH3qSXiT93qLBmtaUkuv9fberHVqrbAeVlztVdUsKAq7JMQH+M99iFkC9UiRMqHmtjWJ0ok4COD1sRYixxi21wb/JrMe3M1iL4QJVS61iltjHhVdM64="
sudo: false
addons:
apt:
packages:
- libgmp3-dev
notifications:
webhooks:
urls:
- https://webhooks.gitter.im/e/e09ccdce1048c5e03445
on_success: change
on_success: change
on_failure: always
on_start: false
on_start: false
AUTHORS (12 changed lines)

@@ -3,22 +3,34 @@

Alex Leverington <alex@ethdev.com>
Alexandre Van de Sande <alex.vandesande@ethdev.com>
Bas van Kervel <bas@ethdev.com>
Christoph Jentzsch <jentzsch.software@gmail.com>
Daniel A. Nagy <nagy.da@gmail.com>
Drake Burroughs <wildfyre@hotmail.com>
Enrique Fynn <enriquefynn@gmail.com>
Ethan Buchman <ethan@coinculture.info>
Fabian Vogelsteller <fabian@frozeman.de>
Felix Lange <fjl@twurst.com>
Gustav Simonsson <gustav.simonsson@gmail.com>
Isidoro Ghezzi <isidoro.ghezzi@icloud.com>
Jae Kwon <jkwon.work@gmail.com>
Jason Carver <jacarver@linkedin.com>
Jeff R. Allen <jra@nella.org>
Jeffrey Wilcke <jeffrey@ethereum.org>
Joseph Chow <ethereum@outlook.com>
Kobi Gurkan <kobigurk@gmail.com>
Lefteris Karapetsas <lefteris@refu.co>
Leif Jurvetson <leijurv@gmail.com>
Maran Hidskes <maran.hidskes@gmail.com>
Marek Kotewicz <marek.kotewicz@gmail.com>
Matthew Wampler-Doty <matthew.wampler.doty@gmail.com>
Nick Dodson <silentcicero@outlook.com>
Peter Pratscher <pratscher@gmail.com>
Péter Szilágyi <peterke@gmail.com>
Ramesh Nair <ram@hiddentao.com>
Ricardo Catalinas Jiménez <r@untroubled.be>
Rémy Roy <remyroy@remyroy.com>
Taylor Gerring <taylor.gerring@gmail.com>
Viktor Trón <viktor.tron@gmail.com>
Vincent G <caktux@gmail.com>
Vitalik Buterin <v@buterin.com>
Zsolt Felföldi <zsfelfoldi@gmail.com>
271
Godeps/Godeps.json
generated
@ -1,121 +1,315 @@
|
||||
{
|
||||
"ImportPath": "github.com/ethereum/go-ethereum",
|
||||
"GoVersion": "go1.4",
|
||||
"GoVersion": "go1.5.2",
|
||||
"GodepVersion": "v74",
|
||||
"Packages": [
|
||||
"./..."
|
||||
],
|
||||
"Deps": [
|
||||
{
|
||||
"ImportPath": "github.com/codegangsta/cli",
|
||||
"Comment": "1.2.0-161-gf445c89",
|
||||
"Rev": "f445c894402839580d30de47551cedc152dad814"
|
||||
"ImportPath": "github.com/Gustav-Simonsson/go-opencl/cl",
|
||||
"Rev": "593e01cfc4f3353585015321e01951d4a907d3ef"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/cespare/cp",
|
||||
"Rev": "165db2f241fd235aec29ba6d9b1ccd5f1c14637c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/davecgh/go-spew/spew",
|
||||
"Rev": "3e6e67c4dcea3ac2f25fd4731abc0e1deaf36216"
|
||||
"Rev": "5215b55f46b2b919f50a1df0eaa5886afe4e3b3d"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/ethereum/ethash",
|
||||
"Comment": "v23.1-238-g9401881",
|
||||
"Rev": "9401881ab040d1a3b0ae9e4780a115bc284a8a1a"
|
||||
"Comment": "v23.1-245-g25b32de",
|
||||
"Rev": "25b32de0c0271065c28c3719c2bfe86959d72f0c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/fatih/color",
|
||||
"Comment": "v0.1-5-gf773d4c",
|
||||
"Rev": "f773d4c806cc8e4a5749d6a35e2a4bbcd71443d6"
|
||||
"Comment": "v0.1-12-g9aae6aa",
|
||||
"Rev": "9aae6aaa22315390f03959adca2c4d395b02fcef"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/gizak/termui",
|
||||
"Rev": "bab8dce01c193d82bc04888a0a9a7814d505f532"
|
||||
"Rev": "08a5d3f67b7d9ec87830ea39c48e570a1f18531f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/golang/snappy",
|
||||
"Rev": "799c780093d646c1b79d30894e22512c319fa137"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/hashicorp/golang-lru",
|
||||
"Rev": "7f9ef20a0256f494e24126014135cf893ab71e9e"
|
||||
"Rev": "a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/hashicorp/golang-lru/simplelru",
|
||||
"Rev": "a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp",
|
||||
"Rev": "90f71cb5dd6d4606388666d2cda4ce2f563d2185"
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp/dcps/internetgateway1",
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp/dcps/internetgateway2",
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp/httpu",
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp/scpd",
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp/soap",
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/huin/goupnp/ssdp",
|
||||
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/jackpal/gateway",
|
||||
"Rev": "192609c58b8985e645cbe82ddcb28a4362ca0fdc"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/jackpal/go-nat-pmp",
|
||||
"Rev": "a45aa3d54aef73b504e15eb71bea0e5565b5e6e1"
|
||||
"Rev": "46523a463303c6ede3ddfe45bde1c7ed52ebaacd"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/kardianos/osext",
|
||||
"Rev": "ccfcd0245381f0c94c68f50626665eed3c6b726a"
|
||||
"ImportPath": "github.com/mattn/go-colorable",
|
||||
"Rev": "9fdad7c47650b7d2e1da50644c1f4ba7f172f252"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/mattn/go-isatty",
|
||||
"Rev": "7fcbc72f853b92b5720db4a6b8482be612daef24"
|
||||
"Rev": "56b76bdf51f7708750eac80fa38b952bb9f32639"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/mattn/go-runewidth",
|
||||
"Comment": "travisish-33-g5890272",
|
||||
"Rev": "5890272cd41c5103531cd7b79e428d99c9e97f76"
|
||||
"Comment": "travisish-44-ge882a96",
|
||||
"Rev": "e882a96ec18dd43fa283187b66af74497c9101c0"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/microsoft/go-winio",
|
||||
"Comment": "v0.2.0",
|
||||
"Rev": "9e2895e5f6c3f16473b91d37fae6e89990a4520c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/nsf/termbox-go",
|
||||
"Rev": "675ffd907b7401b8a709a5ef2249978af5616bb2"
|
||||
"Rev": "362329b0aa6447eadd52edd8d660ec1dff470295"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/pborman/uuid",
|
||||
"Rev": "cccd189d45f7ac3368a0d127efb7f4d08ae0b655"
|
||||
"Comment": "v1.0-6-g0f1a469",
|
||||
"Rev": "0f1a46960a86dcdf5dd30d3e6568a497a997909f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/peterh/liner",
|
||||
"Rev": "29f6a646557d83e2b6e9ba05c45fbea9c006dbe8"
|
||||
"Rev": "ad1edfd30321d8f006ccf05f1e0524adeb943060"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/rcrowley/go-metrics",
|
||||
"Rev": "a5cfc242a56ba7fa70b785f678d6214837bf93b9"
|
||||
"Rev": "51425a2415d21afadfd55cd93432c0bc69e9598d"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/rjeczalik/notify",
|
||||
"Rev": "5dd6205716539662f8f14ab513552b41eab69d5d"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto",
|
||||
"Rev": "dea31a3d392779af358ec41f77a07fcc7e9d04ba"
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto/ast",
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto/dbg",
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto/file",
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto/parser",
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto/registry",
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/robertkrimen/otto/token",
|
||||
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/rs/cors",
|
||||
"Rev": "6e0c3cb65fc0fdb064c743d176a620e3ca446dfb"
|
||||
"Rev": "5950cf11d77f8a61b432a25dd4d444b4ced01379"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/shiena/ansicolor",
|
||||
"Rev": "a5e2b567a4dd6cc74545b8a4f27c9d63b9e7735b"
|
||||
"ImportPath": "github.com/rs/xhandler",
|
||||
"Rev": "d9d9599b6aaf6a058cb7b1f48291ded2cbd13390"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb",
|
||||
"Rev": "4875955338b0a434238a31165cb87255ab6e9e4a"
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/gosnappy/snappy",
|
||||
"Rev": "156a073208e131d7d2e212cb749feae7c339e846"
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/cache",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/comparer",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/errors",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/filter",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/iterator",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/journal",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/memdb",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/opt",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/storage",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/table",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/syndtr/goleveldb/leveldb/util",
|
||||
"Rev": "917f41c560270110ceb73c5b38be2a9127387071"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/crypto/pbkdf2",
|
||||
"Rev": "4ed45ec682102c643324fae5dff8dab085b6c300"
|
||||
"Rev": "1f22c0103821b9390939b6776727195525381532"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/crypto/ripemd160",
|
||||
"Rev": "4ed45ec682102c643324fae5dff8dab085b6c300"
|
||||
"Rev": "1f22c0103821b9390939b6776727195525381532"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/crypto/scrypt",
|
||||
"Rev": "4ed45ec682102c643324fae5dff8dab085b6c300"
|
||||
"Rev": "1f22c0103821b9390939b6776727195525381532"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/context",
|
||||
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/html",
|
||||
"Rev": "e0403b4e005737430c05a57aac078479844f919c"
|
||||
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/html/atom",
|
||||
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/html/charset",
|
||||
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/websocket",
|
||||
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/sys/unix",
|
||||
"Rev": "50c6bc5e4292a1d4e65c6e9be5f53be28bcbe28e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding",
|
||||
"Rev": "c93e7c9fff19fb9139b5ab04ce041833add0134e"
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/charmap",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/htmlindex",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/internal",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/internal/identifier",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/japanese",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/korean",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/simplifiedchinese",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/traditionalchinese",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/encoding/unicode",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/internal/tag",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/internal/utf8internal",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/language",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/runes",
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/text/transform",
|
||||
"Rev": "c93e7c9fff19fb9139b5ab04ce041833add0134e"
|
||||
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/tools/go/ast/astutil",
|
||||
"Rev": "758728c4b28cfbac299730969ef8f655c4761283"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/tools/imports",
|
||||
"Rev": "758728c4b28cfbac299730969ef8f655c4761283"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/check.v1",
|
||||
"Rev": "64131543e7896d5bcc6bd5a76287eb75ea96c673"
|
||||
"Rev": "4f90aeace3a26ad7021961c297b22c42160c7b25"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/fatih/set.v0",
|
||||
@ -125,6 +319,11 @@
|
||||
{
|
||||
"ImportPath": "gopkg.in/karalabe/cookiejar.v2/collections/prque",
|
||||
"Rev": "8dcd6a7f4951f6ff3ee9cbb919a06d8925822e57"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/urfave/cli.v1",
|
||||
"Comment": "v1.17.0",
|
||||
"Rev": "01857ac33766ce0c93856370626f9799281c14f4"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
254
Godeps/_workspace/src/github.com/Gustav-Simonsson/go-opencl/cl/cl_test.go
generated
vendored
@ -1,254 +0,0 @@
|
||||
package cl
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var kernelSource = `
|
||||
__kernel void square(
|
||||
__global float* input,
|
||||
__global float* output,
|
||||
const unsigned int count)
|
||||
{
|
||||
int i = get_global_id(0);
|
||||
if(i < count)
|
||||
output[i] = input[i] * input[i];
|
||||
}
|
||||
`
|
||||
|
||||
func getObjectStrings(object interface{}) map[string]string {
|
||||
v := reflect.ValueOf(object)
|
||||
t := reflect.TypeOf(object)
|
||||
|
||||
strs := make(map[string]string)
|
||||
|
||||
numMethods := t.NumMethod()
|
||||
for i := 0; i < numMethods; i++ {
|
||||
method := t.Method(i)
|
||||
if method.Type.NumIn() == 1 && method.Type.NumOut() == 1 && method.Type.Out(0).Kind() == reflect.String {
|
||||
// this is a string-returning method with (presumably) only a pointer receiver parameter
|
||||
// call it
|
||||
outs := v.Method(i).Call([]reflect.Value{})
|
||||
// put the result in our map
|
||||
strs[method.Name] = (outs[0].Interface()).(string)
|
||||
}
|
||||
}
|
||||
|
||||
return strs
|
||||
}
|
||||
|
||||
func TestPlatformStringsContainNoNULs(t *testing.T) {
|
||||
platforms, err := GetPlatforms()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get platforms: %+v", err)
|
||||
}
|
||||
|
||||
for _, p := range platforms {
|
||||
for key, value := range getObjectStrings(p) {
|
||||
if strings.Contains(value, "\x00") {
|
||||
t.Fatalf("platform string %q = %+q contains NUL", key, value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDeviceStringsContainNoNULs(t *testing.T) {
|
||||
platforms, err := GetPlatforms()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get platforms: %+v", err)
|
||||
}
|
||||
|
||||
for _, p := range platforms {
|
||||
devs, err := p.GetDevices(DeviceTypeAll)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get devices for platform %q: %+v", p.Name(), err)
|
||||
}
|
||||
|
||||
for _, d := range devs {
|
||||
for key, value := range getObjectStrings(d) {
|
||||
if strings.Contains(value, "\x00") {
|
||||
t.Fatalf("device string %q = %+q contains NUL", key, value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestHello(t *testing.T) {
|
||||
var data [1024]float32
|
||||
for i := 0; i < len(data); i++ {
|
||||
data[i] = rand.Float32()
|
||||
}
|
||||
|
||||
platforms, err := GetPlatforms()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get platforms: %+v", err)
|
||||
}
|
||||
for i, p := range platforms {
|
||||
t.Logf("Platform %d:", i)
|
||||
t.Logf(" Name: %s", p.Name())
|
||||
t.Logf(" Vendor: %s", p.Vendor())
|
||||
t.Logf(" Profile: %s", p.Profile())
|
||||
t.Logf(" Version: %s", p.Version())
|
||||
t.Logf(" Extensions: %s", p.Extensions())
|
||||
}
|
||||
platform := platforms[0]
|
||||
|
||||
devices, err := platform.GetDevices(DeviceTypeAll)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get devices: %+v", err)
|
||||
}
|
||||
if len(devices) == 0 {
|
||||
t.Fatalf("GetDevices returned no devices")
|
||||
}
|
||||
deviceIndex := -1
|
||||
for i, d := range devices {
|
||||
if deviceIndex < 0 && d.Type() == DeviceTypeGPU {
|
||||
deviceIndex = i
|
||||
}
|
||||
t.Logf("Device %d (%s): %s", i, d.Type(), d.Name())
|
||||
t.Logf(" Address Bits: %d", d.AddressBits())
|
||||
t.Logf(" Available: %+v", d.Available())
|
||||
// t.Logf(" Built-In Kernels: %s", d.BuiltInKernels())
|
||||
t.Logf(" Compiler Available: %+v", d.CompilerAvailable())
|
||||
t.Logf(" Double FP Config: %s", d.DoubleFPConfig())
|
||||
t.Logf(" Driver Version: %s", d.DriverVersion())
|
||||
t.Logf(" Error Correction Supported: %+v", d.ErrorCorrectionSupport())
|
||||
t.Logf(" Execution Capabilities: %s", d.ExecutionCapabilities())
|
||||
t.Logf(" Extensions: %s", d.Extensions())
|
||||
t.Logf(" Global Memory Cache Type: %s", d.GlobalMemCacheType())
|
||||
t.Logf(" Global Memory Cacheline Size: %d KB", d.GlobalMemCachelineSize()/1024)
|
||||
t.Logf(" Global Memory Size: %d MB", d.GlobalMemSize()/(1024*1024))
|
||||
t.Logf(" Half FP Config: %s", d.HalfFPConfig())
|
||||
t.Logf(" Host Unified Memory: %+v", d.HostUnifiedMemory())
|
||||
t.Logf(" Image Support: %+v", d.ImageSupport())
|
||||
t.Logf(" Image2D Max Dimensions: %d x %d", d.Image2DMaxWidth(), d.Image2DMaxHeight())
|
||||
t.Logf(" Image3D Max Dimenionns: %d x %d x %d", d.Image3DMaxWidth(), d.Image3DMaxHeight(), d.Image3DMaxDepth())
|
||||
// t.Logf(" Image Max Buffer Size: %d", d.ImageMaxBufferSize())
|
||||
// t.Logf(" Image Max Array Size: %d", d.ImageMaxArraySize())
|
||||
// t.Logf(" Linker Available: %+v", d.LinkerAvailable())
|
||||
t.Logf(" Little Endian: %+v", d.EndianLittle())
|
||||
t.Logf(" Local Mem Size Size: %d KB", d.LocalMemSize()/1024)
|
||||
t.Logf(" Local Mem Type: %s", d.LocalMemType())
|
||||
t.Logf(" Max Clock Frequency: %d", d.MaxClockFrequency())
|
||||
t.Logf(" Max Compute Units: %d", d.MaxComputeUnits())
|
||||
t.Logf(" Max Constant Args: %d", d.MaxConstantArgs())
|
||||
t.Logf(" Max Constant Buffer Size: %d KB", d.MaxConstantBufferSize()/1024)
|
||||
t.Logf(" Max Mem Alloc Size: %d KB", d.MaxMemAllocSize()/1024)
|
||||
t.Logf(" Max Parameter Size: %d", d.MaxParameterSize())
|
||||
t.Logf(" Max Read-Image Args: %d", d.MaxReadImageArgs())
|
||||
t.Logf(" Max Samplers: %d", d.MaxSamplers())
|
||||
t.Logf(" Max Work Group Size: %d", d.MaxWorkGroupSize())
|
||||
t.Logf(" Max Work Item Dimensions: %d", d.MaxWorkItemDimensions())
|
||||
t.Logf(" Max Work Item Sizes: %d", d.MaxWorkItemSizes())
|
||||
t.Logf(" Max Write-Image Args: %d", d.MaxWriteImageArgs())
|
||||
t.Logf(" Memory Base Address Alignment: %d", d.MemBaseAddrAlign())
|
||||
t.Logf(" Native Vector Width Char: %d", d.NativeVectorWidthChar())
|
||||
t.Logf(" Native Vector Width Short: %d", d.NativeVectorWidthShort())
|
||||
t.Logf(" Native Vector Width Int: %d", d.NativeVectorWidthInt())
|
||||
t.Logf(" Native Vector Width Long: %d", d.NativeVectorWidthLong())
|
||||
t.Logf(" Native Vector Width Float: %d", d.NativeVectorWidthFloat())
|
||||
t.Logf(" Native Vector Width Double: %d", d.NativeVectorWidthDouble())
|
||||
t.Logf(" Native Vector Width Half: %d", d.NativeVectorWidthHalf())
|
||||
t.Logf(" OpenCL C Version: %s", d.OpenCLCVersion())
|
||||
// t.Logf(" Parent Device: %+v", d.ParentDevice())
|
||||
t.Logf(" Profile: %s", d.Profile())
|
||||
t.Logf(" Profiling Timer Resolution: %d", d.ProfilingTimerResolution())
|
||||
t.Logf(" Vendor: %s", d.Vendor())
|
||||
t.Logf(" Version: %s", d.Version())
|
||||
}
|
||||
if deviceIndex < 0 {
|
||||
deviceIndex = 0
|
||||
}
|
||||
device := devices[deviceIndex]
|
||||
t.Logf("Using device %d", deviceIndex)
|
||||
context, err := CreateContext([]*Device{device})
|
||||
if err != nil {
|
||||
t.Fatalf("CreateContext failed: %+v", err)
|
||||
}
|
||||
// imageFormats, err := context.GetSupportedImageFormats(0, MemObjectTypeImage2D)
|
||||
// if err != nil {
|
||||
// t.Fatalf("GetSupportedImageFormats failed: %+v", err)
|
||||
// }
|
||||
// t.Logf("Supported image formats: %+v", imageFormats)
|
||||
queue, err := context.CreateCommandQueue(device, 0)
|
||||
if err != nil {
|
||||
t.Fatalf("CreateCommandQueue failed: %+v", err)
|
||||
}
|
||||
program, err := context.CreateProgramWithSource([]string{kernelSource})
|
||||
if err != nil {
|
||||
t.Fatalf("CreateProgramWithSource failed: %+v", err)
|
||||
}
|
||||
if err := program.BuildProgram(nil, ""); err != nil {
|
||||
t.Fatalf("BuildProgram failed: %+v", err)
|
||||
}
|
||||
kernel, err := program.CreateKernel("square")
|
||||
if err != nil {
|
||||
t.Fatalf("CreateKernel failed: %+v", err)
|
||||
}
|
||||
for i := 0; i < 3; i++ {
|
||||
name, err := kernel.ArgName(i)
|
||||
if err == ErrUnsupported {
|
||||
break
|
||||
} else if err != nil {
|
||||
t.Errorf("GetKernelArgInfo for name failed: %+v", err)
|
||||
break
|
||||
} else {
|
||||
t.Logf("Kernel arg %d: %s", i, name)
|
||||
}
|
||||
}
|
||||
input, err := context.CreateEmptyBuffer(MemReadOnly, 4*len(data))
|
||||
if err != nil {
|
||||
t.Fatalf("CreateBuffer failed for input: %+v", err)
|
||||
}
|
||||
output, err := context.CreateEmptyBuffer(MemReadOnly, 4*len(data))
|
||||
if err != nil {
|
||||
t.Fatalf("CreateBuffer failed for output: %+v", err)
|
||||
}
|
||||
if _, err := queue.EnqueueWriteBufferFloat32(input, true, 0, data[:], nil); err != nil {
|
||||
t.Fatalf("EnqueueWriteBufferFloat32 failed: %+v", err)
|
||||
}
|
||||
if err := kernel.SetArgs(input, output, uint32(len(data))); err != nil {
|
||||
t.Fatalf("SetKernelArgs failed: %+v", err)
|
||||
}
|
||||
|
||||
local, err := kernel.WorkGroupSize(device)
|
||||
if err != nil {
|
||||
t.Fatalf("WorkGroupSize failed: %+v", err)
|
||||
}
|
||||
t.Logf("Work group size: %d", local)
|
||||
size, _ := kernel.PreferredWorkGroupSizeMultiple(nil)
|
||||
t.Logf("Preferred Work Group Size Multiple: %d", size)
|
||||
|
||||
global := len(data)
|
||||
d := len(data) % local
|
||||
if d != 0 {
|
||||
global += local - d
|
||||
}
|
||||
if _, err := queue.EnqueueNDRangeKernel(kernel, nil, []int{global}, []int{local}, nil); err != nil {
|
||||
t.Fatalf("EnqueueNDRangeKernel failed: %+v", err)
|
||||
}
|
||||
|
||||
if err := queue.Finish(); err != nil {
|
||||
t.Fatalf("Finish failed: %+v", err)
|
||||
}
|
||||
|
||||
results := make([]float32, len(data))
|
||||
if _, err := queue.EnqueueReadBufferFloat32(output, true, 0, results, nil); err != nil {
|
||||
t.Fatalf("EnqueueReadBufferFloat32 failed: %+v", err)
|
||||
}
|
||||
|
||||
correct := 0
|
||||
for i, v := range data {
|
||||
if results[i] == v*v {
|
||||
correct++
|
||||
}
|
||||
}
|
||||
|
||||
if correct != len(data) {
|
||||
t.Fatalf("%d/%d correct values", correct, len(data))
|
||||
}
|
||||
}
|
Godeps/_workspace/src/github.com/cespare/cp/LICENSE.txt (generated, vendored, new file, 19 lines)

@@ -0,0 +1,19 @@

Copyright (c) 2015 Caleb Spare

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Godeps/_workspace/src/github.com/cespare/cp/README.md (generated, vendored, new file, 9 lines)

@@ -0,0 +1,9 @@

# cp

[](https://godoc.org/github.com/cespare/cp)

cp is a small Go package for copying files and directories.

The API may change because I want to add some options in the future (for merging with existing dirs).

It does not currently handle Windows specifically (I think it may require some special treatment).
Godeps/_workspace/src/github.com/cespare/cp/cp.go (generated, vendored, new file, 58 lines)

@@ -0,0 +1,58 @@

// Package cp offers simple file and directory copying for Go.
package cp

import (
	"errors"
	"io"
	"os"
	"path/filepath"
	"strings"
)

var errCopyFileWithDir = errors.New("dir argument to CopyFile")

// CopyFile copies the file with path src to dst. The new file must not exist.
// It is created with the same permissions as src.
func CopyFile(dst, src string) error {
	rf, err := os.Open(src)
	if err != nil {
		return err
	}
	defer rf.Close()
	rstat, err := rf.Stat()
	if err != nil {
		return err
	}
	if rstat.IsDir() {
		return errCopyFileWithDir
	}

	wf, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE|os.O_EXCL, rstat.Mode())
	if err != nil {
		return err
	}
	if _, err := io.Copy(wf, rf); err != nil {
		wf.Close()
		return err
	}
	return wf.Close()
}

// CopyAll copies the file or (recursively) the directory at src to dst.
// Permissions are preserved. dst must not already exist.
func CopyAll(dst, src string) error {
	return filepath.Walk(src, makeWalkFn(dst, src))
}

func makeWalkFn(dst, src string) filepath.WalkFunc {
	return func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		dstPath := filepath.Join(dst, strings.TrimPrefix(path, src))
		if info.IsDir() {
			return os.Mkdir(dstPath, info.Mode())
		}
		return CopyFile(dstPath, path)
	}
}
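For orientation only (not part of the diff above): a minimal usage sketch of the vendored cp package, assuming it is imported under its canonical path `github.com/cespare/cp`; the file and directory paths are made up for illustration.

``` go
package main

import (
	"log"

	"github.com/cespare/cp" // canonical import path of the vendored package above
)

func main() {
	// CopyFile(dst, src): dst must not exist yet (cp.go opens it with os.O_EXCL)
	// and is created with the same permissions as src.
	if err := cp.CopyFile("/tmp/genesis-copy.json", "/tmp/genesis.json"); err != nil {
		log.Fatalf("copy file: %v", err)
	}

	// CopyAll(dst, src) walks src recursively, recreating directories and
	// copying files while preserving their permission bits.
	if err := cp.CopyAll("/tmp/keystore-backup", "/tmp/keystore"); err != nil {
		log.Fatalf("copy dir: %v", err)
	}
}
```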
Godeps/_workspace/src/github.com/codegangsta/cli/.travis.yml (generated, vendored, 13 lines deleted)

@@ -1,13 +0,0 @@

language: go
sudo: false

go:
- 1.0.3
- 1.1.2
- 1.2.2
- 1.3.3
- 1.4.2

script:
- go vet ./...
- go test -v ./...
315
Godeps/_workspace/src/github.com/codegangsta/cli/README.md
generated
vendored
@ -1,315 +0,0 @@
|
||||
[](http://gocover.io/github.com/codegangsta/cli)
|
||||
[](https://travis-ci.org/codegangsta/cli)
|
||||
[](https://godoc.org/github.com/codegangsta/cli)
|
||||
|
||||
# cli.go
|
||||
`cli.go` is simple, fast, and fun package for building command line apps in Go. The goal is to enable developers to write fast and distributable command line applications in an expressive way.
|
||||
|
||||
## Overview
|
||||
Command line apps are usually so tiny that there is absolutely no reason why your code should *not* be self-documenting. Things like generating help text and parsing command flags/options should not hinder productivity when writing a command line app.
|
||||
|
||||
**This is where `cli.go` comes into play.** `cli.go` makes command line programming fun, organized, and expressive!
|
||||
|
||||
## Installation
|
||||
Make sure you have a working Go environment (go 1.1+ is *required*). [See the install instructions](http://golang.org/doc/install.html).
|
||||
|
||||
To install `cli.go`, simply run:
|
||||
```
|
||||
$ go get github.com/codegangsta/cli
|
||||
```
|
||||
|
||||
Make sure your `PATH` includes to the `$GOPATH/bin` directory so your commands can be easily used:
|
||||
```
|
||||
export PATH=$PATH:$GOPATH/bin
|
||||
```
|
||||
|
||||
## Getting Started
|
||||
One of the philosophies behind `cli.go` is that an API should be playful and full of discovery. So a `cli.go` app can be as little as one line of code in `main()`.
|
||||
|
||||
``` go
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"github.com/codegangsta/cli"
|
||||
)
|
||||
|
||||
func main() {
|
||||
cli.NewApp().Run(os.Args)
|
||||
}
|
||||
```
|
||||
|
||||
This app will run and show help text, but is not very useful. Let's give an action to execute and some help documentation:
|
||||
|
||||
``` go
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"github.com/codegangsta/cli"
|
||||
)
|
||||
|
||||
func main() {
|
||||
app := cli.NewApp()
|
||||
app.Name = "boom"
|
||||
app.Usage = "make an explosive entrance"
|
||||
app.Action = func(c *cli.Context) {
|
||||
println("boom! I say!")
|
||||
}
|
||||
|
||||
app.Run(os.Args)
|
||||
}
|
||||
```
|
||||
|
||||
Running this already gives you a ton of functionality, plus support for things like subcommands and flags, which are covered below.
|
||||
|
||||
## Example
|
||||
|
||||
Being a programmer can be a lonely job. Thankfully by the power of automation that is not the case! Let's create a greeter app to fend off our demons of loneliness!
|
||||
|
||||
Start by creating a directory named `greet`, and within it, add a file, `greet.go` with the following code in it:
|
||||
|
||||
``` go
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"github.com/codegangsta/cli"
|
||||
)
|
||||
|
||||
func main() {
|
||||
app := cli.NewApp()
|
||||
app.Name = "greet"
|
||||
app.Usage = "fight the loneliness!"
|
||||
app.Action = func(c *cli.Context) {
|
||||
println("Hello friend!")
|
||||
}
|
||||
|
||||
app.Run(os.Args)
|
||||
}
|
||||
```
|
||||
|
||||
Install our command to the `$GOPATH/bin` directory:
|
||||
|
||||
```
|
||||
$ go install
|
||||
```
|
||||
|
||||
Finally run our new command:
|
||||
|
||||
```
|
||||
$ greet
|
||||
Hello friend!
|
||||
```
|
||||
|
||||
`cli.go` also generates neat help text:
|
||||
|
||||
```
|
||||
$ greet help
|
||||
NAME:
|
||||
greet - fight the loneliness!
|
||||
|
||||
USAGE:
|
||||
greet [global options] command [command options] [arguments...]
|
||||
|
||||
VERSION:
|
||||
0.0.0
|
||||
|
||||
COMMANDS:
|
||||
help, h Shows a list of commands or help for one command
|
||||
|
||||
GLOBAL OPTIONS
|
||||
--version Shows version information
|
||||
```
|
||||
|
||||
### Arguments
|
||||
You can lookup arguments by calling the `Args` function on `cli.Context`.
|
||||
|
||||
``` go
|
||||
...
|
||||
app.Action = func(c *cli.Context) {
|
||||
println("Hello", c.Args()[0])
|
||||
}
|
||||
...
|
||||
```
|
||||
|
||||
### Flags
|
||||
Setting and querying flags is simple.
|
||||
``` go
|
||||
...
|
||||
app.Flags = []cli.Flag {
|
||||
cli.StringFlag{
|
||||
Name: "lang",
|
||||
Value: "english",
|
||||
Usage: "language for the greeting",
|
||||
},
|
||||
}
|
||||
app.Action = func(c *cli.Context) {
|
||||
name := "someone"
|
||||
if len(c.Args()) > 0 {
|
||||
name = c.Args()[0]
|
||||
}
|
||||
if c.String("lang") == "spanish" {
|
||||
println("Hola", name)
|
||||
} else {
|
||||
println("Hello", name)
|
||||
}
|
||||
}
|
||||
...
|
||||
```
|
||||
|
||||
See full list of flags at http://godoc.org/github.com/codegangsta/cli
|
||||
|
||||
#### Alternate Names
|
||||
|
||||
You can set alternate (or short) names for flags by providing a comma-delimited list for the `Name`. e.g.
|
||||
|
||||
``` go
|
||||
app.Flags = []cli.Flag {
|
||||
cli.StringFlag{
|
||||
Name: "lang, l",
|
||||
Value: "english",
|
||||
Usage: "language for the greeting",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
That flag can then be set with `--lang spanish` or `-l spanish`. Note that giving two different forms of the same flag in the same command invocation is an error.
|
||||
|
||||
#### Values from the Environment
|
||||
|
||||
You can also have the default value set from the environment via `EnvVar`. e.g.
|
||||
|
||||
``` go
|
||||
app.Flags = []cli.Flag {
|
||||
cli.StringFlag{
|
||||
Name: "lang, l",
|
||||
Value: "english",
|
||||
Usage: "language for the greeting",
|
||||
EnvVar: "APP_LANG",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
The `EnvVar` may also be given as a comma-delimited "cascade", where the first environment variable that resolves is used as the default.
|
||||
|
||||
``` go
|
||||
app.Flags = []cli.Flag {
|
||||
cli.StringFlag{
|
||||
Name: "lang, l",
|
||||
Value: "english",
|
||||
Usage: "language for the greeting",
|
||||
EnvVar: "LEGACY_COMPAT_LANG,APP_LANG,LANG",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Subcommands
|
||||
|
||||
Subcommands can be defined for a more git-like command line app.
|
||||
```go
|
||||
...
|
||||
app.Commands = []cli.Command{
|
||||
{
|
||||
Name: "add",
|
||||
Aliases: []string{"a"},
|
||||
Usage: "add a task to the list",
|
||||
Action: func(c *cli.Context) {
|
||||
println("added task: ", c.Args().First())
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "complete",
|
||||
Aliases: []string{"c"},
|
||||
Usage: "complete a task on the list",
|
||||
Action: func(c *cli.Context) {
|
||||
println("completed task: ", c.Args().First())
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "template",
|
||||
Aliases: []string{"r"},
|
||||
Usage: "options for task templates",
|
||||
Subcommands: []cli.Command{
|
||||
{
|
||||
Name: "add",
|
||||
Usage: "add a new template",
|
||||
Action: func(c *cli.Context) {
|
||||
println("new task template: ", c.Args().First())
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "remove",
|
||||
Usage: "remove an existing template",
|
||||
Action: func(c *cli.Context) {
|
||||
println("removed task template: ", c.Args().First())
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
...
|
||||
```
|
||||
|
||||
### Bash Completion
|
||||
|
||||
You can enable completion commands by setting the `EnableBashCompletion`
|
||||
flag on the `App` object. By default, this setting will only auto-complete to
|
||||
show an app's subcommands, but you can write your own completion methods for
|
||||
the App or its subcommands.
|
||||
```go
|
||||
...
|
||||
var tasks = []string{"cook", "clean", "laundry", "eat", "sleep", "code"}
|
||||
app := cli.NewApp()
|
||||
app.EnableBashCompletion = true
|
||||
app.Commands = []cli.Command{
|
||||
{
|
||||
Name: "complete",
|
||||
Aliases: []string{"c"},
|
||||
Usage: "complete a task on the list",
|
||||
Action: func(c *cli.Context) {
|
||||
println("completed task: ", c.Args().First())
|
||||
},
|
||||
BashComplete: func(c *cli.Context) {
|
||||
// This will complete if no args are passed
|
||||
if len(c.Args()) > 0 {
|
||||
return
|
||||
}
|
||||
for _, t := range tasks {
|
||||
fmt.Println(t)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
...
|
||||
```
|
||||
|
||||
#### To Enable
|
||||
|
||||
Source the `autocomplete/bash_autocomplete` file in your `.bashrc` file while
|
||||
setting the `PROG` variable to the name of your program:
|
||||
|
||||
`PROG=myprogram source /.../cli/autocomplete/bash_autocomplete`
|
||||
|
||||
#### To Distribute
|
||||
|
||||
Copy `autocomplete/bash_autocomplete` into `/etc/bash_completion.d/` and rename
|
||||
it to the name of the program you wish to add autocomplete support for (or
|
||||
automatically install it there if you are distributing a package). Don't forget
|
||||
to source the file to make it active in the current shell.
|
||||
|
||||
```
|
||||
sudo cp src/bash_autocomplete /etc/bash_completion.d/<myprogram>
|
||||
source /etc/bash_completion.d/<myprogram>
|
||||
```
|
||||
|
||||
Alternatively, you can just document that users should source the generic
|
||||
`autocomplete/bash_autocomplete` in their bash configuration with `$PROG` set
|
||||
to the name of their program (as above).
|
||||
|
||||
## Contribution Guidelines
|
||||
Feel free to put up a pull request to fix a bug or maybe add a feature. I will give it a code review and make sure that it does not break backwards compatibility. If I or any other collaborators agree that it is in line with the vision of the project, we will work with you to get the code into a mergeable state and merge it into the master branch.
|
||||
|
||||
If you have contributed something significant to the project, I will most likely add you as a collaborator. As a collaborator you are given the ability to merge others pull requests. It is very important that new code does not break existing code, so be careful about what code you do choose to merge. If you have any questions feel free to link @codegangsta to the issue in question and we can review it together.
|
||||
|
||||
If you feel like you have contributed to the project but have not yet been added as a collaborator, I probably forgot to add you. Hit @codegangsta up over email and we will get it figured out.
|
333
Godeps/_workspace/src/github.com/codegangsta/cli/app.go
generated
vendored
@ -1,333 +0,0 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"time"
|
||||
)
|
||||
|
||||
// App is the main structure of a cli application. It is recomended that
|
||||
// an app be created with the cli.NewApp() function
|
||||
type App struct {
|
||||
// The name of the program. Defaults to os.Args[0]
|
||||
Name string
|
||||
// Full name of command for help, defaults to Name
|
||||
HelpName string
|
||||
// Description of the program.
|
||||
Usage string
|
||||
// Description of the program argument format.
|
||||
ArgsUsage string
|
||||
// Version of the program
|
||||
Version string
|
||||
// List of commands to execute
|
||||
Commands []Command
|
||||
// List of flags to parse
|
||||
Flags []Flag
|
||||
// Boolean to enable bash completion commands
|
||||
EnableBashCompletion bool
|
||||
// Boolean to hide built-in help command
|
||||
HideHelp bool
|
||||
// Boolean to hide built-in version flag
|
||||
HideVersion bool
|
||||
// An action to execute when the bash-completion flag is set
|
||||
BashComplete func(context *Context)
|
||||
// An action to execute before any subcommands are run, but after the context is ready
|
||||
// If a non-nil error is returned, no subcommands are run
|
||||
Before func(context *Context) error
|
||||
// An action to execute after any subcommands are run, but after the subcommand has finished
|
||||
// It is run even if Action() panics
|
||||
After func(context *Context) error
|
||||
// The action to execute when no subcommands are specified
|
||||
Action func(context *Context)
|
||||
// Execute this function if the proper command cannot be found
|
||||
CommandNotFound func(context *Context, command string)
|
||||
// Compilation date
|
||||
Compiled time.Time
|
||||
// List of all authors who contributed
|
||||
Authors []Author
|
||||
// Copyright of the binary if any
|
||||
Copyright string
|
||||
// Name of Author (Note: Use App.Authors, this is deprecated)
|
||||
Author string
|
||||
// Email of Author (Note: Use App.Authors, this is deprecated)
|
||||
Email string
|
||||
// Writer writer to write output to
|
||||
Writer io.Writer
|
||||
}
|
||||
|
||||
// Tries to find out when this binary was compiled.
|
||||
// Returns the current time if it fails to find it.
|
||||
func compileTime() time.Time {
|
||||
info, err := os.Stat(os.Args[0])
|
||||
if err != nil {
|
||||
return time.Now()
|
||||
}
|
||||
return info.ModTime()
|
||||
}
|
||||
|
||||
// Creates a new cli Application with some reasonable defaults for Name, Usage, Version and Action.
|
||||
func NewApp() *App {
|
||||
return &App{
|
||||
Name: os.Args[0],
|
||||
HelpName: os.Args[0],
|
||||
Usage: "A new cli application",
|
||||
Version: "0.0.0",
|
||||
BashComplete: DefaultAppComplete,
|
||||
Action: helpCommand.Action,
|
||||
Compiled: compileTime(),
|
||||
Writer: os.Stdout,
|
||||
}
|
||||
}
|
||||
|
||||
// Entry point to the cli app. Parses the arguments slice and routes to the proper flag/args combination
|
||||
func (a *App) Run(arguments []string) (err error) {
|
||||
if a.Author != "" || a.Email != "" {
|
||||
a.Authors = append(a.Authors, Author{Name: a.Author, Email: a.Email})
|
||||
}
|
||||
|
||||
newCmds := []Command{}
|
||||
for _, c := range a.Commands {
|
||||
if c.HelpName == "" {
|
||||
c.HelpName = fmt.Sprintf("%s %s", a.HelpName, c.Name)
|
||||
}
|
||||
newCmds = append(newCmds, c)
|
||||
}
|
||||
a.Commands = newCmds
|
||||
|
||||
// append help to commands
|
||||
if a.Command(helpCommand.Name) == nil && !a.HideHelp {
|
||||
a.Commands = append(a.Commands, helpCommand)
|
||||
if (HelpFlag != BoolFlag{}) {
|
||||
a.appendFlag(HelpFlag)
|
||||
}
|
||||
}
|
||||
|
||||
//append version/help flags
|
||||
if a.EnableBashCompletion {
|
||||
a.appendFlag(BashCompletionFlag)
|
||||
}
|
||||
|
||||
if !a.HideVersion {
|
||||
a.appendFlag(VersionFlag)
|
||||
}
|
||||
|
||||
// parse flags
|
||||
set := flagSet(a.Name, a.Flags)
|
||||
set.SetOutput(ioutil.Discard)
|
||||
err = set.Parse(arguments[1:])
|
||||
nerr := normalizeFlags(a.Flags, set)
|
||||
if nerr != nil {
|
||||
fmt.Fprintln(a.Writer, nerr)
|
||||
context := NewContext(a, set, nil)
|
||||
ShowAppHelp(context)
|
||||
return nerr
|
||||
}
|
||||
context := NewContext(a, set, nil)
|
||||
|
||||
if err != nil {
|
||||
fmt.Fprintln(a.Writer, "Incorrect Usage.")
|
||||
fmt.Fprintln(a.Writer)
|
||||
ShowAppHelp(context)
|
||||
return err
|
||||
}
|
||||
|
||||
if checkCompletions(context) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !a.HideHelp && checkHelp(context) {
|
||||
ShowAppHelp(context)
|
||||
return nil
|
||||
}
|
||||
|
||||
if !a.HideVersion && checkVersion(context) {
|
||||
ShowVersion(context)
|
||||
return nil
|
||||
}
|
||||
|
||||
if a.After != nil {
|
||||
defer func() {
|
||||
afterErr := a.After(context)
|
||||
if afterErr != nil {
|
||||
if err != nil {
|
||||
err = NewMultiError(err, afterErr)
|
||||
} else {
|
||||
err = afterErr
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
if a.Before != nil {
|
||||
err := a.Before(context)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
args := context.Args()
|
||||
if args.Present() {
|
||||
name := args.First()
|
||||
c := a.Command(name)
|
||||
if c != nil {
|
||||
return c.Run(context)
|
||||
}
|
||||
}
|
||||
|
||||
// Run default Action
|
||||
a.Action(context)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Another entry point to the cli app, takes care of passing arguments and error handling
|
||||
func (a *App) RunAndExitOnError() {
|
||||
if err := a.Run(os.Args); err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// Invokes the subcommand given the context, parses ctx.Args() to generate command-specific flags
func (a *App) RunAsSubcommand(ctx *Context) (err error) {
	// append help to commands
	if len(a.Commands) > 0 {
		if a.Command(helpCommand.Name) == nil && !a.HideHelp {
			a.Commands = append(a.Commands, helpCommand)
			if (HelpFlag != BoolFlag{}) {
				a.appendFlag(HelpFlag)
			}
		}
	}

	newCmds := []Command{}
	for _, c := range a.Commands {
		if c.HelpName == "" {
			c.HelpName = fmt.Sprintf("%s %s", a.HelpName, c.Name)
		}
		newCmds = append(newCmds, c)
	}
	a.Commands = newCmds

	// append flags
	if a.EnableBashCompletion {
		a.appendFlag(BashCompletionFlag)
	}

	// parse flags
	set := flagSet(a.Name, a.Flags)
	set.SetOutput(ioutil.Discard)
	err = set.Parse(ctx.Args().Tail())
	nerr := normalizeFlags(a.Flags, set)
	context := NewContext(a, set, ctx)

	if nerr != nil {
		fmt.Fprintln(a.Writer, nerr)
		fmt.Fprintln(a.Writer)
		if len(a.Commands) > 0 {
			ShowSubcommandHelp(context)
		} else {
			ShowCommandHelp(ctx, context.Args().First())
		}
		return nerr
	}

	if err != nil {
		fmt.Fprintln(a.Writer, "Incorrect Usage.")
		fmt.Fprintln(a.Writer)
		ShowSubcommandHelp(context)
		return err
	}

	if checkCompletions(context) {
		return nil
	}

	if len(a.Commands) > 0 {
		if checkSubcommandHelp(context) {
			return nil
		}
	} else {
		if checkCommandHelp(ctx, context.Args().First()) {
			return nil
		}
	}

	if a.After != nil {
		defer func() {
			afterErr := a.After(context)
			if afterErr != nil {
				if err != nil {
					err = NewMultiError(err, afterErr)
				} else {
					err = afterErr
				}
			}
		}()
	}

	if a.Before != nil {
		err := a.Before(context)
		if err != nil {
			return err
		}
	}

	args := context.Args()
	if args.Present() {
		name := args.First()
		c := a.Command(name)
		if c != nil {
			return c.Run(context)
		}
	}

	// Run default Action
	a.Action(context)

	return nil
}

// Returns the named command on App. Returns nil if the command does not exist
func (a *App) Command(name string) *Command {
	for _, c := range a.Commands {
		if c.HasName(name) {
			return &c
		}
	}

	return nil
}

func (a *App) hasFlag(flag Flag) bool {
	for _, f := range a.Flags {
		if flag == f {
			return true
		}
	}

	return false
}

func (a *App) appendFlag(flag Flag) {
	if !a.hasFlag(flag) {
		a.Flags = append(a.Flags, flag)
	}
}

// Author represents someone who has contributed to a cli project.
type Author struct {
	Name  string // The Authors name
	Email string // The Authors email
}

// String makes Author comply to the Stringer interface, to allow an easy print in the templating process
func (a Author) String() string {
	e := ""
	if a.Email != "" {
		e = "<" + a.Email + "> "
	}

	return fmt.Sprintf("%v %v", a.Name, e)
}

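As context for the app.go code above, here is a minimal sketch of how a program might consume this vendored cli package; the program name, usage string, and greeting logic are illustrative assumptions, not part of this diff.

package main

import (
	"fmt"
	"os"

	"github.com/codegangsta/cli"
)

func main() {
	// NewApp fills in the defaults shown above (Name/HelpName from os.Args[0],
	// Version "0.0.0", the help command as the default Action, and so on).
	app := cli.NewApp()
	app.Name = "greet" // hypothetical program name
	app.Usage = "print a friendly greeting"
	app.Action = func(c *cli.Context) {
		// Run hands the parsed context to Action, as in a.Action(context) above.
		fmt.Println("hello,", c.Args().First())
	}

	// Run parses os.Args and routes to the proper flag/command combination;
	// RunAndExitOnError above is the variant that prints the error and exits.
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
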
Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/bash_autocomplete  (generated, vendored; 15 changed lines)
@@ -1,15 +0,0 @@
#! /bin/bash

: ${PROG:=$(basename ${BASH_SOURCE})}

_cli_bash_autocomplete() {
	local cur prev opts base
	COMPREPLY=()
	cur="${COMP_WORDS[COMP_CWORD]}"
	prev="${COMP_WORDS[COMP_CWORD-1]}"
	opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion )
	COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
	return 0
}

complete -F _cli_bash_autocomplete $PROG
Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/zsh_autocomplete  (generated, vendored; 5 changed lines)
@@ -1,5 +0,0 @@
autoload -U compinit && compinit
autoload -U bashcompinit && bashcompinit

script_dir=$(dirname $0)
source ${script_dir}/bash_autocomplete
Godeps/_workspace/src/github.com/davecgh/go-spew/LICENSE  (generated, vendored, new file; 13 lines)
@@ -0,0 +1,13 @@
Copyright (c) 2012-2013 Dave Collins <dave@davec.name>

Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/bypass.go  (generated, vendored, new file; 151 lines)
@@ -0,0 +1,151 @@
|
||||
// Copyright (c) 2015 Dave Collins <dave@davec.name>
|
||||
//
|
||||
// Permission to use, copy, modify, and distribute this software for any
|
||||
// purpose with or without fee is hereby granted, provided that the above
|
||||
// copyright notice and this permission notice appear in all copies.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
// NOTE: Due to the following build constraints, this file will only be compiled
|
||||
// when the code is not running on Google App Engine and "-tags disableunsafe"
|
||||
// is not added to the go build command line.
|
||||
// +build !appengine,!disableunsafe
|
||||
|
||||
package spew
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const (
|
||||
// UnsafeDisabled is a build-time constant which specifies whether or
|
||||
// not access to the unsafe package is available.
|
||||
UnsafeDisabled = false
|
||||
|
||||
// ptrSize is the size of a pointer on the current arch.
|
||||
ptrSize = unsafe.Sizeof((*byte)(nil))
|
||||
)
|
||||
|
||||
var (
|
||||
// offsetPtr, offsetScalar, and offsetFlag are the offsets for the
|
||||
// internal reflect.Value fields. These values are valid before golang
|
||||
// commit ecccf07e7f9d which changed the format. The are also valid
|
||||
// after commit 82f48826c6c7 which changed the format again to mirror
|
||||
// the original format. Code in the init function updates these offsets
|
||||
// as necessary.
|
||||
offsetPtr = uintptr(ptrSize)
|
||||
offsetScalar = uintptr(0)
|
||||
offsetFlag = uintptr(ptrSize * 2)
|
||||
|
||||
// flagKindWidth and flagKindShift indicate various bits that the
|
||||
// reflect package uses internally to track kind information.
|
||||
//
|
||||
// flagRO indicates whether or not the value field of a reflect.Value is
|
||||
// read-only.
|
||||
//
|
||||
// flagIndir indicates whether the value field of a reflect.Value is
|
||||
// the actual data or a pointer to the data.
|
||||
//
|
||||
// These values are valid before golang commit 90a7c3c86944 which
|
||||
// changed their positions. Code in the init function updates these
|
||||
// flags as necessary.
|
||||
flagKindWidth = uintptr(5)
|
||||
flagKindShift = uintptr(flagKindWidth - 1)
|
||||
flagRO = uintptr(1 << 0)
|
||||
flagIndir = uintptr(1 << 1)
|
||||
)
|
||||
|
||||
func init() {
|
||||
// Older versions of reflect.Value stored small integers directly in the
|
||||
// ptr field (which is named val in the older versions). Versions
|
||||
// between commits ecccf07e7f9d and 82f48826c6c7 added a new field named
|
||||
// scalar for this purpose which unfortunately came before the flag
|
||||
// field, so the offset of the flag field is different for those
|
||||
// versions.
|
||||
//
|
||||
// This code constructs a new reflect.Value from a known small integer
|
||||
// and checks if the size of the reflect.Value struct indicates it has
|
||||
// the scalar field. When it does, the offsets are updated accordingly.
|
||||
vv := reflect.ValueOf(0xf00)
|
||||
if unsafe.Sizeof(vv) == (ptrSize * 4) {
|
||||
offsetScalar = ptrSize * 2
|
||||
offsetFlag = ptrSize * 3
|
||||
}
|
||||
|
||||
// Commit 90a7c3c86944 changed the flag positions such that the low
|
||||
// order bits are the kind. This code extracts the kind from the flags
|
||||
// field and ensures it's the correct type. When it's not, the flag
|
||||
// order has been changed to the newer format, so the flags are updated
|
||||
// accordingly.
|
||||
upf := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + offsetFlag)
|
||||
upfv := *(*uintptr)(upf)
|
||||
flagKindMask := uintptr((1<<flagKindWidth - 1) << flagKindShift)
|
||||
if (upfv&flagKindMask)>>flagKindShift != uintptr(reflect.Int) {
|
||||
flagKindShift = 0
|
||||
flagRO = 1 << 5
|
||||
flagIndir = 1 << 6
|
||||
|
||||
// Commit adf9b30e5594 modified the flags to separate the
|
||||
// flagRO flag into two bits which specifies whether or not the
|
||||
// field is embedded. This causes flagIndir to move over a bit
|
||||
// and means that flagRO is the combination of either of the
|
||||
// original flagRO bit and the new bit.
|
||||
//
|
||||
// This code detects the change by extracting what used to be
|
||||
// the indirect bit to ensure it's set. When it's not, the flag
|
||||
// order has been changed to the newer format, so the flags are
|
||||
// updated accordingly.
|
||||
if upfv&flagIndir == 0 {
|
||||
flagRO = 3 << 5
|
||||
flagIndir = 1 << 7
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// unsafeReflectValue converts the passed reflect.Value into a one that bypasses
|
||||
// the typical safety restrictions preventing access to unaddressable and
|
||||
// unexported data. It works by digging the raw pointer to the underlying
|
||||
// value out of the protected value and generating a new unprotected (unsafe)
|
||||
// reflect.Value to it.
|
||||
//
|
||||
// This allows us to check for implementations of the Stringer and error
|
||||
// interfaces to be used for pretty printing ordinarily unaddressable and
|
||||
// inaccessible values such as unexported struct fields.
|
||||
func unsafeReflectValue(v reflect.Value) (rv reflect.Value) {
|
||||
indirects := 1
|
||||
vt := v.Type()
|
||||
upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr)
|
||||
rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag))
|
||||
if rvf&flagIndir != 0 {
|
||||
vt = reflect.PtrTo(v.Type())
|
||||
indirects++
|
||||
} else if offsetScalar != 0 {
|
||||
// The value is in the scalar field when it's not one of the
|
||||
// reference types.
|
||||
switch vt.Kind() {
|
||||
case reflect.Uintptr:
|
||||
case reflect.Chan:
|
||||
case reflect.Func:
|
||||
case reflect.Map:
|
||||
case reflect.Ptr:
|
||||
case reflect.UnsafePointer:
|
||||
default:
|
||||
upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) +
|
||||
offsetScalar)
|
||||
}
|
||||
}
|
||||
|
||||
pv := reflect.NewAt(vt, upv)
|
||||
rv = pv
|
||||
for i := 0; i < indirects; i++ {
|
||||
rv = rv.Elem()
|
||||
}
|
||||
return rv
|
||||
}
|
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/bypasssafe.go  (generated, vendored, new file; 37 lines)
@@ -0,0 +1,37 @@
// Copyright (c) 2015 Dave Collins <dave@davec.name>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

// NOTE: Due to the following build constraints, this file will only be compiled
// when either the code is running on Google App Engine or "-tags disableunsafe"
// is added to the go build command line.
// +build appengine disableunsafe

package spew

import "reflect"

const (
	// UnsafeDisabled is a build-time constant which specifies whether or
	// not access to the unsafe package is available.
	UnsafeDisabled = true
)

// unsafeReflectValue typically converts the passed reflect.Value into a one
// that bypasses the typical safety restrictions preventing access to
// unaddressable and unexported data. However, doing this relies on access to
// the unsafe package. This is a stub version which simply returns the passed
// reflect.Value when the unsafe package is not available.
func unsafeReflectValue(v reflect.Value) reflect.Value {
	return v
}
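The bypass.go/bypasssafe.go pair above exists so that spew still builds in environments without access to the unsafe package. A rough sketch of the observable difference, using only spew.Dump as it appears elsewhere in this diff (the wallet type is a made-up example):

package main

import "github.com/davecgh/go-spew/spew"

// wallet is a hypothetical type with an unexported field and no Stringer.
type wallet struct {
	owner   string
	Balance int
}

func main() {
	// With bypass.go compiled in, unsafeReflectValue lets spew take an
	// interface to unexported data so it can look for Stringer/error
	// implementations on it. With the bypasssafe.go stub (App Engine or
	// -tags disableunsafe), that step is skipped and spew falls back to
	// plain reflection for such fields.
	spew.Dump(wallet{owner: "alice", Balance: 42})
}
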
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/common.go  (generated, vendored; 135 changed lines)
@@ -23,116 +23,8 @@ import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const (
|
||||
// ptrSize is the size of a pointer on the current arch.
|
||||
ptrSize = unsafe.Sizeof((*byte)(nil))
|
||||
)
|
||||
|
||||
var (
|
||||
// offsetPtr, offsetScalar, and offsetFlag are the offsets for the
|
||||
// internal reflect.Value fields. These values are valid before golang
|
||||
// commit ecccf07e7f9d which changed the format. The are also valid
|
||||
// after commit 82f48826c6c7 which changed the format again to mirror
|
||||
// the original format. Code in the init function updates these offsets
|
||||
// as necessary.
|
||||
offsetPtr = uintptr(ptrSize)
|
||||
offsetScalar = uintptr(0)
|
||||
offsetFlag = uintptr(ptrSize * 2)
|
||||
|
||||
// flagKindWidth and flagKindShift indicate various bits that the
|
||||
// reflect package uses internally to track kind information.
|
||||
//
|
||||
// flagRO indicates whether or not the value field of a reflect.Value is
|
||||
// read-only.
|
||||
//
|
||||
// flagIndir indicates whether the value field of a reflect.Value is
|
||||
// the actual data or a pointer to the data.
|
||||
//
|
||||
// These values are valid before golang commit 90a7c3c86944 which
|
||||
// changed their positions. Code in the init function updates these
|
||||
// flags as necessary.
|
||||
flagKindWidth = uintptr(5)
|
||||
flagKindShift = uintptr(flagKindWidth - 1)
|
||||
flagRO = uintptr(1 << 0)
|
||||
flagIndir = uintptr(1 << 1)
|
||||
)
|
||||
|
||||
func init() {
|
||||
// Older versions of reflect.Value stored small integers directly in the
|
||||
// ptr field (which is named val in the older versions). Versions
|
||||
// between commits ecccf07e7f9d and 82f48826c6c7 added a new field named
|
||||
// scalar for this purpose which unfortunately came before the flag
|
||||
// field, so the offset of the flag field is different for those
|
||||
// versions.
|
||||
//
|
||||
// This code constructs a new reflect.Value from a known small integer
|
||||
// and checks if the size of the reflect.Value struct indicates it has
|
||||
// the scalar field. When it does, the offsets are updated accordingly.
|
||||
vv := reflect.ValueOf(0xf00)
|
||||
if unsafe.Sizeof(vv) == (ptrSize * 4) {
|
||||
offsetScalar = ptrSize * 2
|
||||
offsetFlag = ptrSize * 3
|
||||
}
|
||||
|
||||
// Commit 90a7c3c86944 changed the flag positions such that the low
|
||||
// order bits are the kind. This code extracts the kind from the flags
|
||||
// field and ensures it's the correct type. When it's not, the flag
|
||||
// order has been changed to the newer format, so the flags are updated
|
||||
// accordingly.
|
||||
upf := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + offsetFlag)
|
||||
upfv := *(*uintptr)(upf)
|
||||
flagKindMask := uintptr((1<<flagKindWidth - 1) << flagKindShift)
|
||||
if (upfv&flagKindMask)>>flagKindShift != uintptr(reflect.Int) {
|
||||
flagKindShift = 0
|
||||
flagRO = 1 << 5
|
||||
flagIndir = 1 << 6
|
||||
}
|
||||
}
|
||||
|
||||
// unsafeReflectValue converts the passed reflect.Value into a one that bypasses
|
||||
// the typical safety restrictions preventing access to unaddressable and
|
||||
// unexported data. It works by digging the raw pointer to the underlying
|
||||
// value out of the protected value and generating a new unprotected (unsafe)
|
||||
// reflect.Value to it.
|
||||
//
|
||||
// This allows us to check for implementations of the Stringer and error
|
||||
// interfaces to be used for pretty printing ordinarily unaddressable and
|
||||
// inaccessible values such as unexported struct fields.
|
||||
func unsafeReflectValue(v reflect.Value) (rv reflect.Value) {
|
||||
indirects := 1
|
||||
vt := v.Type()
|
||||
upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr)
|
||||
rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag))
|
||||
if rvf&flagIndir != 0 {
|
||||
vt = reflect.PtrTo(v.Type())
|
||||
indirects++
|
||||
} else if offsetScalar != 0 {
|
||||
// The value is in the scalar field when it's not one of the
|
||||
// reference types.
|
||||
switch vt.Kind() {
|
||||
case reflect.Uintptr:
|
||||
case reflect.Chan:
|
||||
case reflect.Func:
|
||||
case reflect.Map:
|
||||
case reflect.Ptr:
|
||||
case reflect.UnsafePointer:
|
||||
default:
|
||||
upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) +
|
||||
offsetScalar)
|
||||
}
|
||||
}
|
||||
|
||||
pv := reflect.NewAt(vt, upv)
|
||||
rv = pv
|
||||
for i := 0; i < indirects; i++ {
|
||||
rv = rv.Elem()
|
||||
}
|
||||
return rv
|
||||
}
|
||||
|
||||
// Some constants in the form of bytes to avoid string overhead. This mirrors
|
||||
// the technique used in the fmt package.
|
||||
var (
|
||||
@ -194,9 +86,14 @@ func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool)
|
||||
// We need an interface to check if the type implements the error or
|
||||
// Stringer interface. However, the reflect package won't give us an
|
||||
// interface on certain things like unexported struct fields in order
|
||||
// to enforce visibility rules. We use unsafe to bypass these restrictions
|
||||
// since this package does not mutate the values.
|
||||
// to enforce visibility rules. We use unsafe, when it's available,
|
||||
// to bypass these restrictions since this package does not mutate the
|
||||
// values.
|
||||
if !v.CanInterface() {
|
||||
if UnsafeDisabled {
|
||||
return false
|
||||
}
|
||||
|
||||
v = unsafeReflectValue(v)
|
||||
}
|
||||
|
||||
@ -206,21 +103,15 @@ func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool)
|
||||
// mutate the value, however, types which choose to satisify an error or
|
||||
// Stringer interface with a pointer receiver should not be mutating their
|
||||
// state inside these interface methods.
|
||||
var viface interface{}
|
||||
if !cs.DisablePointerMethods {
|
||||
if !v.CanAddr() {
|
||||
v = unsafeReflectValue(v)
|
||||
}
|
||||
viface = v.Addr().Interface()
|
||||
} else {
|
||||
if v.CanAddr() {
|
||||
v = v.Addr()
|
||||
}
|
||||
viface = v.Interface()
|
||||
if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() {
|
||||
v = unsafeReflectValue(v)
|
||||
}
|
||||
if v.CanAddr() {
|
||||
v = v.Addr()
|
||||
}
|
||||
|
||||
// Is it an error or Stringer?
|
||||
switch iface := viface.(type) {
|
||||
switch iface := v.Interface().(type) {
|
||||
case error:
|
||||
defer catchPanic(w, v)
|
||||
if cs.ContinueOnMethod {
|
||||
|
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/common_test.go  (generated, vendored; 298 changed lines)
@@ -1,298 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2013 Dave Collins <dave@davec.name>
|
||||
*
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose with or without fee is hereby granted, provided that the above
|
||||
* copyright notice and this permission notice appear in all copies.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
|
||||
package spew_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
// custom type to test Stinger interface on non-pointer receiver.
|
||||
type stringer string
|
||||
|
||||
// String implements the Stringer interface for testing invocation of custom
|
||||
// stringers on types with non-pointer receivers.
|
||||
func (s stringer) String() string {
|
||||
return "stringer " + string(s)
|
||||
}
|
||||
|
||||
// custom type to test Stinger interface on pointer receiver.
|
||||
type pstringer string
|
||||
|
||||
// String implements the Stringer interface for testing invocation of custom
|
||||
// stringers on types with only pointer receivers.
|
||||
func (s *pstringer) String() string {
|
||||
return "stringer " + string(*s)
|
||||
}
|
||||
|
||||
// xref1 and xref2 are cross referencing structs for testing circular reference
|
||||
// detection.
|
||||
type xref1 struct {
|
||||
ps2 *xref2
|
||||
}
|
||||
type xref2 struct {
|
||||
ps1 *xref1
|
||||
}
|
||||
|
||||
// indirCir1, indirCir2, and indirCir3 are used to generate an indirect circular
|
||||
// reference for testing detection.
|
||||
type indirCir1 struct {
|
||||
ps2 *indirCir2
|
||||
}
|
||||
type indirCir2 struct {
|
||||
ps3 *indirCir3
|
||||
}
|
||||
type indirCir3 struct {
|
||||
ps1 *indirCir1
|
||||
}
|
||||
|
||||
// embed is used to test embedded structures.
|
||||
type embed struct {
|
||||
a string
|
||||
}
|
||||
|
||||
// embedwrap is used to test embedded structures.
|
||||
type embedwrap struct {
|
||||
*embed
|
||||
e *embed
|
||||
}
|
||||
|
||||
// panicer is used to intentionally cause a panic for testing spew properly
|
||||
// handles them
|
||||
type panicer int
|
||||
|
||||
func (p panicer) String() string {
|
||||
panic("test panic")
|
||||
}
|
||||
|
||||
// customError is used to test custom error interface invocation.
|
||||
type customError int
|
||||
|
||||
func (e customError) Error() string {
|
||||
return fmt.Sprintf("error: %d", int(e))
|
||||
}
|
||||
|
||||
// stringizeWants converts a slice of wanted test output into a format suitable
|
||||
// for a test error message.
|
||||
func stringizeWants(wants []string) string {
|
||||
s := ""
|
||||
for i, want := range wants {
|
||||
if i > 0 {
|
||||
s += fmt.Sprintf("want%d: %s", i+1, want)
|
||||
} else {
|
||||
s += "want: " + want
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// testFailed returns whether or not a test failed by checking if the result
|
||||
// of the test is in the slice of wanted strings.
|
||||
func testFailed(result string, wants []string) bool {
|
||||
for _, want := range wants {
|
||||
if result == want {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
type sortableStruct struct {
|
||||
x int
|
||||
}
|
||||
|
||||
func (ss sortableStruct) String() string {
|
||||
return fmt.Sprintf("ss.%d", ss.x)
|
||||
}
|
||||
|
||||
type unsortableStruct struct {
|
||||
x int
|
||||
}
|
||||
|
||||
type sortTestCase struct {
|
||||
input []reflect.Value
|
||||
expected []reflect.Value
|
||||
}
|
||||
|
||||
func helpTestSortValues(tests []sortTestCase, cs *spew.ConfigState, t *testing.T) {
|
||||
getInterfaces := func(values []reflect.Value) []interface{} {
|
||||
interfaces := []interface{}{}
|
||||
for _, v := range values {
|
||||
interfaces = append(interfaces, v.Interface())
|
||||
}
|
||||
return interfaces
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
spew.SortValues(test.input, cs)
|
||||
// reflect.DeepEqual cannot really make sense of reflect.Value,
|
||||
// probably because of all the pointer tricks. For instance,
|
||||
// v(2.0) != v(2.0) on a 32-bits system. Turn them into interface{}
|
||||
// instead.
|
||||
input := getInterfaces(test.input)
|
||||
expected := getInterfaces(test.expected)
|
||||
if !reflect.DeepEqual(input, expected) {
|
||||
t.Errorf("Sort mismatch:\n %v != %v", input, expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestSortValues ensures the sort functionality for relect.Value based sorting
|
||||
// works as intended.
|
||||
func TestSortValues(t *testing.T) {
|
||||
v := reflect.ValueOf
|
||||
|
||||
a := v("a")
|
||||
b := v("b")
|
||||
c := v("c")
|
||||
embedA := v(embed{"a"})
|
||||
embedB := v(embed{"b"})
|
||||
embedC := v(embed{"c"})
|
||||
tests := []sortTestCase{
|
||||
// No values.
|
||||
{
|
||||
[]reflect.Value{},
|
||||
[]reflect.Value{},
|
||||
},
|
||||
// Bools.
|
||||
{
|
||||
[]reflect.Value{v(false), v(true), v(false)},
|
||||
[]reflect.Value{v(false), v(false), v(true)},
|
||||
},
|
||||
// Ints.
|
||||
{
|
||||
[]reflect.Value{v(2), v(1), v(3)},
|
||||
[]reflect.Value{v(1), v(2), v(3)},
|
||||
},
|
||||
// Uints.
|
||||
{
|
||||
[]reflect.Value{v(uint8(2)), v(uint8(1)), v(uint8(3))},
|
||||
[]reflect.Value{v(uint8(1)), v(uint8(2)), v(uint8(3))},
|
||||
},
|
||||
// Floats.
|
||||
{
|
||||
[]reflect.Value{v(2.0), v(1.0), v(3.0)},
|
||||
[]reflect.Value{v(1.0), v(2.0), v(3.0)},
|
||||
},
|
||||
// Strings.
|
||||
{
|
||||
[]reflect.Value{b, a, c},
|
||||
[]reflect.Value{a, b, c},
|
||||
},
|
||||
// Array
|
||||
{
|
||||
[]reflect.Value{v([3]int{3, 2, 1}), v([3]int{1, 3, 2}), v([3]int{1, 2, 3})},
|
||||
[]reflect.Value{v([3]int{1, 2, 3}), v([3]int{1, 3, 2}), v([3]int{3, 2, 1})},
|
||||
},
|
||||
// Uintptrs.
|
||||
{
|
||||
[]reflect.Value{v(uintptr(2)), v(uintptr(1)), v(uintptr(3))},
|
||||
[]reflect.Value{v(uintptr(1)), v(uintptr(2)), v(uintptr(3))},
|
||||
},
|
||||
// SortableStructs.
|
||||
{
|
||||
// Note: not sorted - DisableMethods is set.
|
||||
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
|
||||
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
|
||||
},
|
||||
// UnsortableStructs.
|
||||
{
|
||||
// Note: not sorted - SpewKeys is false.
|
||||
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
|
||||
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
|
||||
},
|
||||
// Invalid.
|
||||
{
|
||||
[]reflect.Value{embedB, embedA, embedC},
|
||||
[]reflect.Value{embedB, embedA, embedC},
|
||||
},
|
||||
}
|
||||
cs := spew.ConfigState{DisableMethods: true, SpewKeys: false}
|
||||
helpTestSortValues(tests, &cs, t)
|
||||
}
|
||||
|
||||
// TestSortValuesWithMethods ensures the sort functionality for relect.Value
|
||||
// based sorting works as intended when using string methods.
|
||||
func TestSortValuesWithMethods(t *testing.T) {
|
||||
v := reflect.ValueOf
|
||||
|
||||
a := v("a")
|
||||
b := v("b")
|
||||
c := v("c")
|
||||
tests := []sortTestCase{
|
||||
// Ints.
|
||||
{
|
||||
[]reflect.Value{v(2), v(1), v(3)},
|
||||
[]reflect.Value{v(1), v(2), v(3)},
|
||||
},
|
||||
// Strings.
|
||||
{
|
||||
[]reflect.Value{b, a, c},
|
||||
[]reflect.Value{a, b, c},
|
||||
},
|
||||
// SortableStructs.
|
||||
{
|
||||
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
|
||||
[]reflect.Value{v(sortableStruct{1}), v(sortableStruct{2}), v(sortableStruct{3})},
|
||||
},
|
||||
// UnsortableStructs.
|
||||
{
|
||||
// Note: not sorted - SpewKeys is false.
|
||||
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
|
||||
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
|
||||
},
|
||||
}
|
||||
cs := spew.ConfigState{DisableMethods: false, SpewKeys: false}
|
||||
helpTestSortValues(tests, &cs, t)
|
||||
}
|
||||
|
||||
// TestSortValuesWithSpew ensures the sort functionality for relect.Value
|
||||
// based sorting works as intended when using spew to stringify keys.
|
||||
func TestSortValuesWithSpew(t *testing.T) {
|
||||
v := reflect.ValueOf
|
||||
|
||||
a := v("a")
|
||||
b := v("b")
|
||||
c := v("c")
|
||||
tests := []sortTestCase{
|
||||
// Ints.
|
||||
{
|
||||
[]reflect.Value{v(2), v(1), v(3)},
|
||||
[]reflect.Value{v(1), v(2), v(3)},
|
||||
},
|
||||
// Strings.
|
||||
{
|
||||
[]reflect.Value{b, a, c},
|
||||
[]reflect.Value{a, b, c},
|
||||
},
|
||||
// SortableStructs.
|
||||
{
|
||||
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
|
||||
[]reflect.Value{v(sortableStruct{1}), v(sortableStruct{2}), v(sortableStruct{3})},
|
||||
},
|
||||
// UnsortableStructs.
|
||||
{
|
||||
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
|
||||
[]reflect.Value{v(unsortableStruct{1}), v(unsortableStruct{2}), v(unsortableStruct{3})},
|
||||
},
|
||||
}
|
||||
cs := spew.ConfigState{DisableMethods: true, SpewKeys: true}
|
||||
helpTestSortValues(tests, &cs, t)
|
||||
}
|
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/config.go  (generated, vendored; 5 changed lines)
@@ -61,7 +61,10 @@ type ConfigState struct {
|
||||
// with a pointer receiver could technically mutate the value, however,
|
||||
// in practice, types which choose to satisify an error or Stringer
|
||||
// interface with a pointer receiver should not be mutating their state
|
||||
// inside these interface methods.
|
||||
// inside these interface methods. As a result, this option relies on
|
||||
// access to the unsafe package, so it will not have any effect when
|
||||
// running in environments without access to the unsafe package such as
|
||||
// Google App Engine or with the "disableunsafe" build tag specified.
|
||||
DisablePointerMethods bool
|
||||
|
||||
// ContinueOnMethod specifies whether or not recursion should continue once
|
||||
|
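The new wording above ties DisablePointerMethods to the availability of unsafe. A small hedged example of setting that option on a ConfigState, mirroring the constructor used later in spew_test.go:

package main

import "github.com/davecgh/go-spew/spew"

func main() {
	// DisablePointerMethods skips Stringer/error methods that need a pointer
	// receiver; per the doc comment above it has no effect when the unsafe
	// package is unavailable. The Indent value here is just illustrative.
	cs := spew.ConfigState{Indent: " ", DisablePointerMethods: true}
	cs.Dump(struct{ N int }{N: 1})
}
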
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/dump.go  (generated, vendored; 29 changed lines)
@@ -181,25 +181,28 @@ func (d *dumpState) dumpSlice(v reflect.Value) {
|
||||
// Try to use existing uint8 slices and fall back to converting
|
||||
// and copying if that fails.
|
||||
case vt.Kind() == reflect.Uint8:
|
||||
// We need an addressable interface to convert the type back
|
||||
// into a byte slice. However, the reflect package won't give
|
||||
// us an interface on certain things like unexported struct
|
||||
// fields in order to enforce visibility rules. We use unsafe
|
||||
// to bypass these restrictions since this package does not
|
||||
// We need an addressable interface to convert the type
|
||||
// to a byte slice. However, the reflect package won't
|
||||
// give us an interface on certain things like
|
||||
// unexported struct fields in order to enforce
|
||||
// visibility rules. We use unsafe, when available, to
|
||||
// bypass these restrictions since this package does not
|
||||
// mutate the values.
|
||||
vs := v
|
||||
if !vs.CanInterface() || !vs.CanAddr() {
|
||||
vs = unsafeReflectValue(vs)
|
||||
}
|
||||
vs = vs.Slice(0, numEntries)
|
||||
if !UnsafeDisabled {
|
||||
vs = vs.Slice(0, numEntries)
|
||||
|
||||
// Use the existing uint8 slice if it can be type
|
||||
// asserted.
|
||||
iface := vs.Interface()
|
||||
if slice, ok := iface.([]uint8); ok {
|
||||
buf = slice
|
||||
doHexDump = true
|
||||
break
|
||||
// Use the existing uint8 slice if it can be
|
||||
// type asserted.
|
||||
iface := vs.Interface()
|
||||
if slice, ok := iface.([]uint8); ok {
|
||||
buf = slice
|
||||
doHexDump = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// The underlying data needs to be converted if it can't
|
||||
|
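The dumpSlice hunk above keeps the hexdump path for byte slices but only performs the slice and interface conversion when UnsafeDisabled is false. For reference, dumping a []byte with the package-level API (shown in ExampleDump later in this diff) produces the len/cap hexdump form:

package main

import "github.com/davecgh/go-spew/spew"

func main() {
	// Output is of the form "([]uint8) (len=4 cap=4) { ... hexdump ... }",
	// matching the ExampleDump output further down in this diff.
	spew.Dump([]byte{0x74, 0x65, 0x73, 0x74})
}
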
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/dump_test.go  (generated, vendored; 1021 changed lines)
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/dumpcgo_test.go  (generated, vendored; 97 changed lines)
@@ -1,97 +0,0 @@
|
||||
// Copyright (c) 2013 Dave Collins <dave@davec.name>
|
||||
//
|
||||
// Permission to use, copy, modify, and distribute this software for any
|
||||
// purpose with or without fee is hereby granted, provided that the above
|
||||
// copyright notice and this permission notice appear in all copies.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
// NOTE: Due to the following build constraints, this file will only be compiled
|
||||
// when both cgo is supported and "-tags testcgo" is added to the go test
|
||||
// command line. This means the cgo tests are only added (and hence run) when
|
||||
// specifially requested. This configuration is used because spew itself
|
||||
// does not require cgo to run even though it does handle certain cgo types
|
||||
// specially. Rather than forcing all clients to require cgo and an external
|
||||
// C compiler just to run the tests, this scheme makes them optional.
|
||||
// +build cgo,testcgo
|
||||
|
||||
package spew_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/davecgh/go-spew/spew/testdata"
|
||||
)
|
||||
|
||||
func addCgoDumpTests() {
|
||||
// C char pointer.
|
||||
v := testdata.GetCgoCharPointer()
|
||||
nv := testdata.GetCgoNullCharPointer()
|
||||
pv := &v
|
||||
vcAddr := fmt.Sprintf("%p", v)
|
||||
vAddr := fmt.Sprintf("%p", pv)
|
||||
pvAddr := fmt.Sprintf("%p", &pv)
|
||||
vt := "*testdata._Ctype_char"
|
||||
vs := "116"
|
||||
addDumpTest(v, "("+vt+")("+vcAddr+")("+vs+")\n")
|
||||
addDumpTest(pv, "(*"+vt+")("+vAddr+"->"+vcAddr+")("+vs+")\n")
|
||||
addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+"->"+vcAddr+")("+vs+")\n")
|
||||
addDumpTest(nv, "("+vt+")(<nil>)\n")
|
||||
|
||||
// C char array.
|
||||
v2, v2l, v2c := testdata.GetCgoCharArray()
|
||||
v2Len := fmt.Sprintf("%d", v2l)
|
||||
v2Cap := fmt.Sprintf("%d", v2c)
|
||||
v2t := "[6]testdata._Ctype_char"
|
||||
v2s := "(len=" + v2Len + " cap=" + v2Cap + ") " +
|
||||
"{\n 00000000 74 65 73 74 32 00 " +
|
||||
" |test2.|\n}"
|
||||
addDumpTest(v2, "("+v2t+") "+v2s+"\n")
|
||||
|
||||
// C unsigned char array.
|
||||
v3, v3l, v3c := testdata.GetCgoUnsignedCharArray()
|
||||
v3Len := fmt.Sprintf("%d", v3l)
|
||||
v3Cap := fmt.Sprintf("%d", v3c)
|
||||
v3t := "[6]testdata._Ctype_unsignedchar"
|
||||
v3s := "(len=" + v3Len + " cap=" + v3Cap + ") " +
|
||||
"{\n 00000000 74 65 73 74 33 00 " +
|
||||
" |test3.|\n}"
|
||||
addDumpTest(v3, "("+v3t+") "+v3s+"\n")
|
||||
|
||||
// C signed char array.
|
||||
v4, v4l, v4c := testdata.GetCgoSignedCharArray()
|
||||
v4Len := fmt.Sprintf("%d", v4l)
|
||||
v4Cap := fmt.Sprintf("%d", v4c)
|
||||
v4t := "[6]testdata._Ctype_schar"
|
||||
v4t2 := "testdata._Ctype_schar"
|
||||
v4s := "(len=" + v4Len + " cap=" + v4Cap + ") " +
|
||||
"{\n (" + v4t2 + ") 116,\n (" + v4t2 + ") 101,\n (" + v4t2 +
|
||||
") 115,\n (" + v4t2 + ") 116,\n (" + v4t2 + ") 52,\n (" + v4t2 +
|
||||
") 0\n}"
|
||||
addDumpTest(v4, "("+v4t+") "+v4s+"\n")
|
||||
|
||||
// C uint8_t array.
|
||||
v5, v5l, v5c := testdata.GetCgoUint8tArray()
|
||||
v5Len := fmt.Sprintf("%d", v5l)
|
||||
v5Cap := fmt.Sprintf("%d", v5c)
|
||||
v5t := "[6]testdata._Ctype_uint8_t"
|
||||
v5s := "(len=" + v5Len + " cap=" + v5Cap + ") " +
|
||||
"{\n 00000000 74 65 73 74 35 00 " +
|
||||
" |test5.|\n}"
|
||||
addDumpTest(v5, "("+v5t+") "+v5s+"\n")
|
||||
|
||||
// C typedefed unsigned char array.
|
||||
v6, v6l, v6c := testdata.GetCgoTypdefedUnsignedCharArray()
|
||||
v6Len := fmt.Sprintf("%d", v6l)
|
||||
v6Cap := fmt.Sprintf("%d", v6c)
|
||||
v6t := "[6]testdata._Ctype_custom_uchar_t"
|
||||
v6s := "(len=" + v6Len + " cap=" + v6Cap + ") " +
|
||||
"{\n 00000000 74 65 73 74 36 00 " +
|
||||
" |test6.|\n}"
|
||||
addDumpTest(v6, "("+v6t+") "+v6s+"\n")
|
||||
}
|
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/dumpnocgo_test.go  (generated, vendored; 26 changed lines)
@@ -1,26 +0,0 @@
// Copyright (c) 2013 Dave Collins <dave@davec.name>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

// NOTE: Due to the following build constraints, this file will only be compiled
// when either cgo is not supported or "-tags testcgo" is not added to the go
// test command line. This file intentionally does not setup any cgo tests in
// this scenario.
// +build !cgo !testcgo

package spew_test

func addCgoDumpTests() {
	// Don't add any tests for cgo since this file is only compiled when
	// there should not be any cgo tests.
}
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/example_test.go  (generated, vendored; 230 changed lines)
@@ -1,230 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2013 Dave Collins <dave@davec.name>
|
||||
*
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose with or without fee is hereby granted, provided that the above
|
||||
* copyright notice and this permission notice appear in all copies.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
|
||||
package spew_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
type Flag int
|
||||
|
||||
const (
|
||||
flagOne Flag = iota
|
||||
flagTwo
|
||||
)
|
||||
|
||||
var flagStrings = map[Flag]string{
|
||||
flagOne: "flagOne",
|
||||
flagTwo: "flagTwo",
|
||||
}
|
||||
|
||||
func (f Flag) String() string {
|
||||
if s, ok := flagStrings[f]; ok {
|
||||
return s
|
||||
}
|
||||
return fmt.Sprintf("Unknown flag (%d)", int(f))
|
||||
}
|
||||
|
||||
type Bar struct {
|
||||
flag Flag
|
||||
data uintptr
|
||||
}
|
||||
|
||||
type Foo struct {
|
||||
unexportedField Bar
|
||||
ExportedField map[interface{}]interface{}
|
||||
}
|
||||
|
||||
// This example demonstrates how to use Dump to dump variables to stdout.
|
||||
func ExampleDump() {
|
||||
// The following package level declarations are assumed for this example:
|
||||
/*
|
||||
type Flag int
|
||||
|
||||
const (
|
||||
flagOne Flag = iota
|
||||
flagTwo
|
||||
)
|
||||
|
||||
var flagStrings = map[Flag]string{
|
||||
flagOne: "flagOne",
|
||||
flagTwo: "flagTwo",
|
||||
}
|
||||
|
||||
func (f Flag) String() string {
|
||||
if s, ok := flagStrings[f]; ok {
|
||||
return s
|
||||
}
|
||||
return fmt.Sprintf("Unknown flag (%d)", int(f))
|
||||
}
|
||||
|
||||
type Bar struct {
|
||||
flag Flag
|
||||
data uintptr
|
||||
}
|
||||
|
||||
type Foo struct {
|
||||
unexportedField Bar
|
||||
ExportedField map[interface{}]interface{}
|
||||
}
|
||||
*/
|
||||
|
||||
// Setup some sample data structures for the example.
|
||||
bar := Bar{Flag(flagTwo), uintptr(0)}
|
||||
s1 := Foo{bar, map[interface{}]interface{}{"one": true}}
|
||||
f := Flag(5)
|
||||
b := []byte{
|
||||
0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
|
||||
0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
|
||||
0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
|
||||
0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
|
||||
0x31, 0x32,
|
||||
}
|
||||
|
||||
// Dump!
|
||||
spew.Dump(s1, f, b)
|
||||
|
||||
// Output:
|
||||
// (spew_test.Foo) {
|
||||
// unexportedField: (spew_test.Bar) {
|
||||
// flag: (spew_test.Flag) flagTwo,
|
||||
// data: (uintptr) <nil>
|
||||
// },
|
||||
// ExportedField: (map[interface {}]interface {}) (len=1) {
|
||||
// (string) (len=3) "one": (bool) true
|
||||
// }
|
||||
// }
|
||||
// (spew_test.Flag) Unknown flag (5)
|
||||
// ([]uint8) (len=34 cap=34) {
|
||||
// 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... |
|
||||
// 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0|
|
||||
// 00000020 31 32 |12|
|
||||
// }
|
||||
//
|
||||
}
|
||||
|
||||
// This example demonstrates how to use Printf to display a variable with a
|
||||
// format string and inline formatting.
|
||||
func ExamplePrintf() {
|
||||
// Create a double pointer to a uint 8.
|
||||
ui8 := uint8(5)
|
||||
pui8 := &ui8
|
||||
ppui8 := &pui8
|
||||
|
||||
// Create a circular data type.
|
||||
type circular struct {
|
||||
ui8 uint8
|
||||
c *circular
|
||||
}
|
||||
c := circular{ui8: 1}
|
||||
c.c = &c
|
||||
|
||||
// Print!
|
||||
spew.Printf("ppui8: %v\n", ppui8)
|
||||
spew.Printf("circular: %v\n", c)
|
||||
|
||||
// Output:
|
||||
// ppui8: <**>5
|
||||
// circular: {1 <*>{1 <*><shown>}}
|
||||
}
|
||||
|
||||
// This example demonstrates how to use a ConfigState.
|
||||
func ExampleConfigState() {
|
||||
// Modify the indent level of the ConfigState only. The global
|
||||
// configuration is not modified.
|
||||
scs := spew.ConfigState{Indent: "\t"}
|
||||
|
||||
// Output using the ConfigState instance.
|
||||
v := map[string]int{"one": 1}
|
||||
scs.Printf("v: %v\n", v)
|
||||
scs.Dump(v)
|
||||
|
||||
// Output:
|
||||
// v: map[one:1]
|
||||
// (map[string]int) (len=1) {
|
||||
// (string) (len=3) "one": (int) 1
|
||||
// }
|
||||
}
|
||||
|
||||
// This example demonstrates how to use ConfigState.Dump to dump variables to
|
||||
// stdout
|
||||
func ExampleConfigState_Dump() {
|
||||
// See the top-level Dump example for details on the types used in this
|
||||
// example.
|
||||
|
||||
// Create two ConfigState instances with different indentation.
|
||||
scs := spew.ConfigState{Indent: "\t"}
|
||||
scs2 := spew.ConfigState{Indent: " "}
|
||||
|
||||
// Setup some sample data structures for the example.
|
||||
bar := Bar{Flag(flagTwo), uintptr(0)}
|
||||
s1 := Foo{bar, map[interface{}]interface{}{"one": true}}
|
||||
|
||||
// Dump using the ConfigState instances.
|
||||
scs.Dump(s1)
|
||||
scs2.Dump(s1)
|
||||
|
||||
// Output:
|
||||
// (spew_test.Foo) {
|
||||
// unexportedField: (spew_test.Bar) {
|
||||
// flag: (spew_test.Flag) flagTwo,
|
||||
// data: (uintptr) <nil>
|
||||
// },
|
||||
// ExportedField: (map[interface {}]interface {}) (len=1) {
|
||||
// (string) (len=3) "one": (bool) true
|
||||
// }
|
||||
// }
|
||||
// (spew_test.Foo) {
|
||||
// unexportedField: (spew_test.Bar) {
|
||||
// flag: (spew_test.Flag) flagTwo,
|
||||
// data: (uintptr) <nil>
|
||||
// },
|
||||
// ExportedField: (map[interface {}]interface {}) (len=1) {
|
||||
// (string) (len=3) "one": (bool) true
|
||||
// }
|
||||
// }
|
||||
//
|
||||
}
|
||||
|
||||
// This example demonstrates how to use ConfigState.Printf to display a variable
|
||||
// with a format string and inline formatting.
|
||||
func ExampleConfigState_Printf() {
|
||||
// See the top-level Dump example for details on the types used in this
|
||||
// example.
|
||||
|
||||
// Create two ConfigState instances and modify the method handling of the
|
||||
// first ConfigState only.
|
||||
scs := spew.NewDefaultConfig()
|
||||
scs2 := spew.NewDefaultConfig()
|
||||
scs.DisableMethods = true
|
||||
|
||||
// Alternatively
|
||||
// scs := spew.ConfigState{Indent: " ", DisableMethods: true}
|
||||
// scs2 := spew.ConfigState{Indent: " "}
|
||||
|
||||
// This is of type Flag which implements a Stringer and has raw value 1.
|
||||
f := flagTwo
|
||||
|
||||
// Dump using the ConfigState instances.
|
||||
scs.Printf("f: %v\n", f)
|
||||
scs2.Printf("f: %v\n", f)
|
||||
|
||||
// Output:
|
||||
// f: 1
|
||||
// f: flagTwo
|
||||
}
|
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/format_test.go  (generated, vendored; 1535 changed lines)
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/internal_test.go  (generated, vendored; 156 changed lines)
@@ -1,156 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2013 Dave Collins <dave@davec.name>
|
||||
*
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose with or without fee is hereby granted, provided that the above
|
||||
* copyright notice and this permission notice appear in all copies.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
|
||||
/*
|
||||
This test file is part of the spew package rather than than the spew_test
|
||||
package because it needs access to internals to properly test certain cases
|
||||
which are not possible via the public interface since they should never happen.
|
||||
*/
|
||||
|
||||
package spew
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"reflect"
|
||||
"testing"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// dummyFmtState implements a fake fmt.State to use for testing invalid
|
||||
// reflect.Value handling. This is necessary because the fmt package catches
|
||||
// invalid values before invoking the formatter on them.
|
||||
type dummyFmtState struct {
|
||||
bytes.Buffer
|
||||
}
|
||||
|
||||
func (dfs *dummyFmtState) Flag(f int) bool {
|
||||
if f == int('+') {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (dfs *dummyFmtState) Precision() (int, bool) {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
func (dfs *dummyFmtState) Width() (int, bool) {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// TestInvalidReflectValue ensures the dump and formatter code handles an
|
||||
// invalid reflect value properly. This needs access to internal state since it
|
||||
// should never happen in real code and therefore can't be tested via the public
|
||||
// API.
|
||||
func TestInvalidReflectValue(t *testing.T) {
|
||||
i := 1
|
||||
|
||||
// Dump invalid reflect value.
|
||||
v := new(reflect.Value)
|
||||
buf := new(bytes.Buffer)
|
||||
d := dumpState{w: buf, cs: &Config}
|
||||
d.dump(*v)
|
||||
s := buf.String()
|
||||
want := "<invalid>"
|
||||
if s != want {
|
||||
t.Errorf("InvalidReflectValue #%d\n got: %s want: %s", i, s, want)
|
||||
}
|
||||
i++
|
||||
|
||||
// Formatter invalid reflect value.
|
||||
buf2 := new(dummyFmtState)
|
||||
f := formatState{value: *v, cs: &Config, fs: buf2}
|
||||
f.format(*v)
|
||||
s = buf2.String()
|
||||
want = "<invalid>"
|
||||
if s != want {
|
||||
t.Errorf("InvalidReflectValue #%d got: %s want: %s", i, s, want)
|
||||
}
|
||||
}
|
||||
|
||||
// changeKind uses unsafe to intentionally change the kind of a reflect.Value to
|
||||
// the maximum kind value which does not exist. This is needed to test the
|
||||
// fallback code which punts to the standard fmt library for new types that
|
||||
// might get added to the language.
|
||||
func changeKind(v *reflect.Value, readOnly bool) {
|
||||
rvf := (*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + offsetFlag))
|
||||
*rvf = *rvf | ((1<<flagKindWidth - 1) << flagKindShift)
|
||||
if readOnly {
|
||||
*rvf |= flagRO
|
||||
} else {
|
||||
*rvf &= ^uintptr(flagRO)
|
||||
}
|
||||
}
|
||||
|
||||
// TestAddedReflectValue tests functionaly of the dump and formatter code which
|
||||
// falls back to the standard fmt library for new types that might get added to
|
||||
// the language.
|
||||
func TestAddedReflectValue(t *testing.T) {
|
||||
i := 1
|
||||
|
||||
// Dump using a reflect.Value that is exported.
|
||||
v := reflect.ValueOf(int8(5))
|
||||
changeKind(&v, false)
|
||||
buf := new(bytes.Buffer)
|
||||
d := dumpState{w: buf, cs: &Config}
|
||||
d.dump(v)
|
||||
s := buf.String()
|
||||
want := "(int8) 5"
|
||||
if s != want {
|
||||
t.Errorf("TestAddedReflectValue #%d\n got: %s want: %s", i, s, want)
|
||||
}
|
||||
i++
|
||||
|
||||
// Dump using a reflect.Value that is not exported.
|
||||
changeKind(&v, true)
|
||||
buf.Reset()
|
||||
d.dump(v)
|
||||
s = buf.String()
|
||||
want = "(int8) <int8 Value>"
|
||||
if s != want {
|
||||
t.Errorf("TestAddedReflectValue #%d\n got: %s want: %s", i, s, want)
|
||||
}
|
||||
i++
|
||||
|
||||
// Formatter using a reflect.Value that is exported.
|
||||
changeKind(&v, false)
|
||||
buf2 := new(dummyFmtState)
|
||||
f := formatState{value: v, cs: &Config, fs: buf2}
|
||||
f.format(v)
|
||||
s = buf2.String()
|
||||
want = "5"
|
||||
if s != want {
|
||||
t.Errorf("TestAddedReflectValue #%d got: %s want: %s", i, s, want)
|
||||
}
|
||||
i++
|
||||
|
||||
// Formatter using a reflect.Value that is not exported.
|
||||
changeKind(&v, true)
|
||||
buf2.Reset()
|
||||
f = formatState{value: v, cs: &Config, fs: buf2}
|
||||
f.format(v)
|
||||
s = buf2.String()
|
||||
want = "<int8 Value>"
|
||||
if s != want {
|
||||
t.Errorf("TestAddedReflectValue #%d got: %s want: %s", i, s, want)
|
||||
}
|
||||
}
|
||||
|
||||
// SortValues makes the internal sortValues function available to the test
|
||||
// package.
|
||||
func SortValues(values []reflect.Value, cs *ConfigState) {
|
||||
sortValues(values, cs)
|
||||
}
|
Godeps/_workspace/src/github.com/davecgh/go-spew/spew/spew_test.go  (generated, vendored; 308 changed lines)
@@ -1,308 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2013 Dave Collins <dave@davec.name>
|
||||
*
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose with or without fee is hereby granted, provided that the above
|
||||
* copyright notice and this permission notice appear in all copies.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
|
||||
package spew_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// spewFunc is used to identify which public function of the spew package or
|
||||
// ConfigState a test applies to.
|
||||
type spewFunc int
|
||||
|
||||
const (
|
||||
fCSFdump spewFunc = iota
|
||||
fCSFprint
|
||||
fCSFprintf
|
||||
fCSFprintln
|
||||
fCSPrint
|
||||
fCSPrintln
|
||||
fCSSdump
|
||||
fCSSprint
|
||||
fCSSprintf
|
||||
fCSSprintln
|
||||
fCSErrorf
|
||||
fCSNewFormatter
|
||||
fErrorf
|
||||
fFprint
|
||||
fFprintln
|
||||
fPrint
|
||||
fPrintln
|
||||
fSdump
|
||||
fSprint
|
||||
fSprintf
|
||||
fSprintln
|
||||
)
|
||||
|
||||
// Map of spewFunc values to names for pretty printing.
|
||||
var spewFuncStrings = map[spewFunc]string{
|
||||
fCSFdump: "ConfigState.Fdump",
|
||||
fCSFprint: "ConfigState.Fprint",
|
||||
fCSFprintf: "ConfigState.Fprintf",
|
||||
fCSFprintln: "ConfigState.Fprintln",
|
||||
fCSSdump: "ConfigState.Sdump",
|
||||
fCSPrint: "ConfigState.Print",
|
||||
fCSPrintln: "ConfigState.Println",
|
||||
fCSSprint: "ConfigState.Sprint",
|
||||
fCSSprintf: "ConfigState.Sprintf",
|
||||
fCSSprintln: "ConfigState.Sprintln",
|
||||
fCSErrorf: "ConfigState.Errorf",
|
||||
fCSNewFormatter: "ConfigState.NewFormatter",
|
||||
fErrorf: "spew.Errorf",
|
||||
fFprint: "spew.Fprint",
|
||||
fFprintln: "spew.Fprintln",
|
||||
fPrint: "spew.Print",
|
||||
fPrintln: "spew.Println",
|
||||
fSdump: "spew.Sdump",
|
||||
fSprint: "spew.Sprint",
|
||||
fSprintf: "spew.Sprintf",
|
||||
fSprintln: "spew.Sprintln",
|
||||
}
|
||||
|
||||
func (f spewFunc) String() string {
|
||||
if s, ok := spewFuncStrings[f]; ok {
|
||||
return s
|
||||
}
|
||||
return fmt.Sprintf("Unknown spewFunc (%d)", int(f))
|
||||
}
|
||||
|
||||
// spewTest is used to describe a test to be performed against the public
|
||||
// functions of the spew package or ConfigState.
|
||||
type spewTest struct {
|
||||
cs *spew.ConfigState
|
||||
f spewFunc
|
||||
format string
|
||||
in interface{}
|
||||
want string
|
||||
}
|
||||
|
||||
// spewTests houses the tests to be performed against the public functions of
|
||||
// the spew package and ConfigState.
|
||||
//
|
||||
// These tests are only intended to ensure the public functions are exercised
|
||||
// and are intentionally not exhaustive of types. The exhaustive type
|
||||
// tests are handled in the dump and format tests.
|
||||
var spewTests []spewTest
|
||||
|
||||
// redirStdout is a helper function to return the standard output from f as a
|
||||
// byte slice.
|
||||
func redirStdout(f func()) ([]byte, error) {
|
||||
tempFile, err := ioutil.TempFile("", "ss-test")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fileName := tempFile.Name()
|
||||
defer os.Remove(fileName) // Ignore error
|
||||
|
||||
origStdout := os.Stdout
|
||||
os.Stdout = tempFile
|
||||
f()
|
||||
os.Stdout = origStdout
|
||||
tempFile.Close()
|
||||
|
||||
return ioutil.ReadFile(fileName)
|
||||
}
|
||||
|
||||
func initSpewTests() {
|
||||
// Config states with various settings.
|
||||
scsDefault := spew.NewDefaultConfig()
|
||||
scsNoMethods := &spew.ConfigState{Indent: " ", DisableMethods: true}
|
||||
scsNoPmethods := &spew.ConfigState{Indent: " ", DisablePointerMethods: true}
|
||||
scsMaxDepth := &spew.ConfigState{Indent: " ", MaxDepth: 1}
|
||||
scsContinue := &spew.ConfigState{Indent: " ", ContinueOnMethod: true}
|
||||
|
||||
// Variables for tests on types which implement Stringer interface with and
|
||||
// without a pointer receiver.
|
||||
ts := stringer("test")
|
||||
tps := pstringer("test")
|
||||
|
||||
// depthTester is used to test max depth handling for structs, array, slices
|
||||
// and maps.
|
||||
type depthTester struct {
|
||||
ic indirCir1
|
||||
arr [1]string
|
||||
slice []string
|
||||
m map[string]int
|
||||
}
|
||||
dt := depthTester{indirCir1{nil}, [1]string{"arr"}, []string{"slice"},
|
||||
map[string]int{"one": 1}}
|
||||
|
||||
// Variable for tests on types which implement error interface.
|
||||
te := customError(10)
|
||||
|
||||
spewTests = []spewTest{
|
||||
{scsDefault, fCSFdump, "", int8(127), "(int8) 127\n"},
|
||||
{scsDefault, fCSFprint, "", int16(32767), "32767"},
|
||||
{scsDefault, fCSFprintf, "%v", int32(2147483647), "2147483647"},
|
||||
{scsDefault, fCSFprintln, "", int(2147483647), "2147483647\n"},
|
||||
{scsDefault, fCSPrint, "", int64(9223372036854775807), "9223372036854775807"},
|
||||
{scsDefault, fCSPrintln, "", uint8(255), "255\n"},
|
||||
{scsDefault, fCSSdump, "", uint8(64), "(uint8) 64\n"},
|
||||
{scsDefault, fCSSprint, "", complex(1, 2), "(1+2i)"},
|
||||
{scsDefault, fCSSprintf, "%v", complex(float32(3), 4), "(3+4i)"},
|
||||
{scsDefault, fCSSprintln, "", complex(float64(5), 6), "(5+6i)\n"},
|
||||
{scsDefault, fCSErrorf, "%#v", uint16(65535), "(uint16)65535"},
|
||||
{scsDefault, fCSNewFormatter, "%v", uint32(4294967295), "4294967295"},
|
||||
{scsDefault, fErrorf, "%v", uint64(18446744073709551615), "18446744073709551615"},
|
||||
{scsDefault, fFprint, "", float32(3.14), "3.14"},
|
||||
{scsDefault, fFprintln, "", float64(6.28), "6.28\n"},
|
||||
{scsDefault, fPrint, "", true, "true"},
|
||||
{scsDefault, fPrintln, "", false, "false\n"},
|
||||
{scsDefault, fSdump, "", complex(-10, -20), "(complex128) (-10-20i)\n"},
|
||||
{scsDefault, fSprint, "", complex(-1, -2), "(-1-2i)"},
|
||||
{scsDefault, fSprintf, "%v", complex(float32(-3), -4), "(-3-4i)"},
|
||||
{scsDefault, fSprintln, "", complex(float64(-5), -6), "(-5-6i)\n"},
|
||||
{scsNoMethods, fCSFprint, "", ts, "test"},
|
||||
{scsNoMethods, fCSFprint, "", &ts, "<*>test"},
|
||||
{scsNoMethods, fCSFprint, "", tps, "test"},
|
||||
{scsNoMethods, fCSFprint, "", &tps, "<*>test"},
|
||||
{scsNoPmethods, fCSFprint, "", ts, "stringer test"},
|
||||
{scsNoPmethods, fCSFprint, "", &ts, "<*>stringer test"},
|
||||
{scsNoPmethods, fCSFprint, "", tps, "test"},
|
||||
{scsNoPmethods, fCSFprint, "", &tps, "<*>stringer test"},
|
||||
{scsMaxDepth, fCSFprint, "", dt, "{{<max>} [<max>] [<max>] map[<max>]}"},
|
||||
{scsMaxDepth, fCSFdump, "", dt, "(spew_test.depthTester) {\n" +
|
||||
" ic: (spew_test.indirCir1) {\n <max depth reached>\n },\n" +
|
||||
" arr: ([1]string) (len=1 cap=1) {\n <max depth reached>\n },\n" +
|
||||
" slice: ([]string) (len=1 cap=1) {\n <max depth reached>\n },\n" +
|
||||
" m: (map[string]int) (len=1) {\n <max depth reached>\n }\n}\n"},
|
||||
{scsContinue, fCSFprint, "", ts, "(stringer test) test"},
|
||||
{scsContinue, fCSFdump, "", ts, "(spew_test.stringer) " +
|
||||
"(len=4) (stringer test) \"test\"\n"},
|
||||
{scsContinue, fCSFprint, "", te, "(error: 10) 10"},
|
||||
{scsContinue, fCSFdump, "", te, "(spew_test.customError) " +
|
||||
"(error: 10) 10\n"},
|
||||
}
|
||||
}
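A hedged, self-contained sketch of what two of the ConfigStates built above actually change; the function name is hypothetical, and the authoritative expected strings are the spewTests entries themselves.

func demoConfigStates() {
	// MaxDepth stops formatting below the configured depth; the depthTester
	// entries above expect the nested contents to be replaced with <max>.
	maxDepth := &spew.ConfigState{Indent: " ", MaxDepth: 1}
	fmt.Println(maxDepth.Sdump(map[string][]int{"one": {1}}))

	// ContinueOnMethod prints the Stringer result and then keeps formatting the
	// underlying value, e.g. "(stringer test) test" for the stringer type above.
	cont := &spew.ConfigState{Indent: " ", ContinueOnMethod: true}
	fmt.Println(cont.Sprint(stringer("test")))
}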
|
||||
|
||||
// TestSpew executes all of the tests described by spewTests.
|
||||
func TestSpew(t *testing.T) {
|
||||
initSpewTests()
|
||||
|
||||
t.Logf("Running %d tests", len(spewTests))
|
||||
for i, test := range spewTests {
|
||||
buf := new(bytes.Buffer)
|
||||
switch test.f {
|
||||
case fCSFdump:
|
||||
test.cs.Fdump(buf, test.in)
|
||||
|
||||
case fCSFprint:
|
||||
test.cs.Fprint(buf, test.in)
|
||||
|
||||
case fCSFprintf:
|
||||
test.cs.Fprintf(buf, test.format, test.in)
|
||||
|
||||
case fCSFprintln:
|
||||
test.cs.Fprintln(buf, test.in)
|
||||
|
||||
case fCSPrint:
|
||||
b, err := redirStdout(func() { test.cs.Print(test.in) })
|
||||
if err != nil {
|
||||
t.Errorf("%v #%d %v", test.f, i, err)
|
||||
continue
|
||||
}
|
||||
buf.Write(b)
|
||||
|
||||
case fCSPrintln:
|
||||
b, err := redirStdout(func() { test.cs.Println(test.in) })
|
||||
if err != nil {
|
||||
t.Errorf("%v #%d %v", test.f, i, err)
|
||||
continue
|
||||
}
|
||||
buf.Write(b)
|
||||
|
||||
case fCSSdump:
|
||||
str := test.cs.Sdump(test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fCSSprint:
|
||||
str := test.cs.Sprint(test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fCSSprintf:
|
||||
str := test.cs.Sprintf(test.format, test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fCSSprintln:
|
||||
str := test.cs.Sprintln(test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fCSErrorf:
|
||||
err := test.cs.Errorf(test.format, test.in)
|
||||
buf.WriteString(err.Error())
|
||||
|
||||
case fCSNewFormatter:
|
||||
fmt.Fprintf(buf, test.format, test.cs.NewFormatter(test.in))
|
||||
|
||||
case fErrorf:
|
||||
err := spew.Errorf(test.format, test.in)
|
||||
buf.WriteString(err.Error())
|
||||
|
||||
case fFprint:
|
||||
spew.Fprint(buf, test.in)
|
||||
|
||||
case fFprintln:
|
||||
spew.Fprintln(buf, test.in)
|
||||
|
||||
case fPrint:
|
||||
b, err := redirStdout(func() { spew.Print(test.in) })
|
||||
if err != nil {
|
||||
t.Errorf("%v #%d %v", test.f, i, err)
|
||||
continue
|
||||
}
|
||||
buf.Write(b)
|
||||
|
||||
case fPrintln:
|
||||
b, err := redirStdout(func() { spew.Println(test.in) })
|
||||
if err != nil {
|
||||
t.Errorf("%v #%d %v", test.f, i, err)
|
||||
continue
|
||||
}
|
||||
buf.Write(b)
|
||||
|
||||
case fSdump:
|
||||
str := spew.Sdump(test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fSprint:
|
||||
str := spew.Sprint(test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fSprintf:
|
||||
str := spew.Sprintf(test.format, test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
case fSprintln:
|
||||
str := spew.Sprintln(test.in)
|
||||
buf.WriteString(str)
|
||||
|
||||
default:
|
||||
t.Errorf("%v #%d unrecognized function", test.f, i)
|
||||
continue
|
||||
}
|
||||
s := buf.String()
|
||||
if test.want != s {
|
||||
t.Errorf("ConfigState #%d\n got: %s want: %s", i, s, test.want)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
82  Godeps/_workspace/src/github.com/davecgh/go-spew/spew/testdata/dumpcgo.go  generated  vendored
@@ -1,82 +0,0 @@
|
||||
// Copyright (c) 2013 Dave Collins <dave@davec.name>
|
||||
//
|
||||
// Permission to use, copy, modify, and distribute this software for any
|
||||
// purpose with or without fee is hereby granted, provided that the above
|
||||
// copyright notice and this permission notice appear in all copies.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
// NOTE: Due to the following build constraints, this file will only be compiled
|
||||
// when both cgo is supported and "-tags testcgo" is added to the go test
|
||||
// command line. This code should really only be in the dumpcgo_test.go file,
|
||||
// but unfortunately Go will not allow cgo in test files, so this is a
|
||||
// workaround to allow cgo types to be tested. This configuration is used
|
||||
// because spew itself does not require cgo to run even though it does handle
|
||||
// certain cgo types specially. Rather than forcing all clients to require cgo
|
||||
// and an external C compiler just to run the tests, this scheme makes them
|
||||
// optional.
|
||||
// +build cgo,testcgo
|
||||
|
||||
package testdata
|
||||
|
||||
/*
|
||||
#include <stdint.h>
|
||||
typedef unsigned char custom_uchar_t;
|
||||
|
||||
char *ncp = 0;
|
||||
char *cp = "test";
|
||||
char ca[6] = {'t', 'e', 's', 't', '2', '\0'};
|
||||
unsigned char uca[6] = {'t', 'e', 's', 't', '3', '\0'};
|
||||
signed char sca[6] = {'t', 'e', 's', 't', '4', '\0'};
|
||||
uint8_t ui8ta[6] = {'t', 'e', 's', 't', '5', '\0'};
|
||||
custom_uchar_t tuca[6] = {'t', 'e', 's', 't', '6', '\0'};
|
||||
*/
|
||||
import "C"
|
||||
|
||||
// GetCgoNullCharPointer returns a null char pointer via cgo. This is only
|
||||
// used for tests.
|
||||
func GetCgoNullCharPointer() interface{} {
|
||||
return C.ncp
|
||||
}
|
||||
|
||||
// GetCgoCharPointer returns a char pointer via cgo. This is only used for
|
||||
// tests.
|
||||
func GetCgoCharPointer() interface{} {
|
||||
return C.cp
|
||||
}
|
||||
|
||||
// GetCgoCharArray returns a char array via cgo and the array's len and cap.
|
||||
// This is only used for tests.
|
||||
func GetCgoCharArray() (interface{}, int, int) {
|
||||
return C.ca, len(C.ca), cap(C.ca)
|
||||
}
|
||||
|
||||
// GetCgoUnsignedCharArray returns an unsigned char array via cgo and the
|
||||
// array's len and cap. This is only used for tests.
|
||||
func GetCgoUnsignedCharArray() (interface{}, int, int) {
|
||||
return C.uca, len(C.uca), cap(C.uca)
|
||||
}
|
||||
|
||||
// GetCgoSignedCharArray returns a signed char array via cgo and the array's len
|
||||
// and cap. This is only used for tests.
|
||||
func GetCgoSignedCharArray() (interface{}, int, int) {
|
||||
return C.sca, len(C.sca), cap(C.sca)
|
||||
}
|
||||
|
||||
// GetCgoUint8tArray returns a uint8_t array via cgo and the array's len and
|
||||
// cap. This is only used for tests.
|
||||
func GetCgoUint8tArray() (interface{}, int, int) {
|
||||
return C.ui8ta, len(C.ui8ta), cap(C.ui8ta)
|
||||
}
|
||||
|
||||
// GetCgoTypdefedUnsignedCharArray returns a typedefed unsigned char array via
|
||||
// cgo and the array's len and cap. This is only used for tests.
|
||||
func GetCgoTypdefedUnsignedCharArray() (interface{}, int, int) {
|
||||
return C.tuca, len(C.tuca), cap(C.tuca)
|
||||
}
|
161  Godeps/_workspace/src/github.com/ethereum/ethash/cmake/modules/CMakeParseArguments.cmake  generated  vendored
@@ -1,161 +0,0 @@
|
||||
#.rst:
|
||||
# CMakeParseArguments
|
||||
# -------------------
|
||||
#
|
||||
#
|
||||
#
|
||||
# CMAKE_PARSE_ARGUMENTS(<prefix> <options> <one_value_keywords>
|
||||
# <multi_value_keywords> args...)
|
||||
#
|
||||
# CMAKE_PARSE_ARGUMENTS() is intended to be used in macros or functions
|
||||
# for parsing the arguments given to that macro or function. It
|
||||
# processes the arguments and defines a set of variables which hold the
|
||||
# values of the respective options.
|
||||
#
|
||||
# The <options> argument contains all options for the respective macro,
|
||||
# i.e. keywords which can be used when calling the macro without any
|
||||
# value following, like e.g. the OPTIONAL keyword of the install()
|
||||
# command.
|
||||
#
|
||||
# The <one_value_keywords> argument contains all keywords for this macro
|
||||
# which are followed by one value, like e.g. DESTINATION keyword of the
|
||||
# install() command.
|
||||
#
|
||||
# The <multi_value_keywords> argument contains all keywords for this
|
||||
# macro which can be followed by more than one value, like e.g. the
|
||||
# TARGETS or FILES keywords of the install() command.
|
||||
#
|
||||
# When done, CMAKE_PARSE_ARGUMENTS() will have defined for each of the
|
||||
# keywords listed in <options>, <one_value_keywords> and
|
||||
# <multi_value_keywords> a variable composed of the given <prefix>
|
||||
# followed by "_" and the name of the respective keyword. These
|
||||
# variables will then hold the respective value from the argument list.
|
||||
# For the <options> keywords this will be TRUE or FALSE.
|
||||
#
|
||||
# All remaining arguments are collected in a variable
|
||||
# <prefix>_UNPARSED_ARGUMENTS, this can be checked afterwards to see
|
||||
# whether your macro was called with unrecognized parameters.
|
||||
#
|
||||
# As an example, here is a my_install() macro which takes similar arguments
|
||||
# as the real install() command:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# function(MY_INSTALL)
|
||||
# set(options OPTIONAL FAST)
|
||||
# set(oneValueArgs DESTINATION RENAME)
|
||||
# set(multiValueArgs TARGETS CONFIGURATIONS)
|
||||
# cmake_parse_arguments(MY_INSTALL "${options}" "${oneValueArgs}"
|
||||
# "${multiValueArgs}" ${ARGN} )
|
||||
# ...
|
||||
#
|
||||
#
|
||||
#
|
||||
# Assume my_install() has been called like this:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# my_install(TARGETS foo bar DESTINATION bin OPTIONAL blub)
|
||||
#
|
||||
#
|
||||
#
|
||||
# After the cmake_parse_arguments() call the macro will have set the
|
||||
# following variables:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# MY_INSTALL_OPTIONAL = TRUE
|
||||
# MY_INSTALL_FAST = FALSE (this option was not used when calling my_install())
|
||||
# MY_INSTALL_DESTINATION = "bin"
|
||||
# MY_INSTALL_RENAME = "" (was not used)
|
||||
# MY_INSTALL_TARGETS = "foo;bar"
|
||||
# MY_INSTALL_CONFIGURATIONS = "" (was not used)
|
||||
# MY_INSTALL_UNPARSED_ARGUMENTS = "blub" (no value expected after "OPTIONAL")
|
||||
#
|
||||
#
|
||||
#
|
||||
# You can then continue and process these variables.
|
||||
#
|
||||
# Keywords terminate lists of values, e.g. if directly after a
|
||||
# one_value_keyword another recognized keyword follows, this is
|
||||
# interpreted as the beginning of the new option. E.g.
|
||||
# my_install(TARGETS foo DESTINATION OPTIONAL) might look like it sets
# MY_INSTALL_DESTINATION to "OPTIONAL", but because OPTIONAL is itself a
# keyword it terminates the value list, so MY_INSTALL_DESTINATION ends up
# empty and MY_INSTALL_OPTIONAL is set to TRUE instead.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2010 Alexander Neundorf <neundorf@kde.org>
|
||||
#
|
||||
# Distributed under the OSI-approved BSD License (the "License");
|
||||
# see accompanying file Copyright.txt for details.
|
||||
#
|
||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the License for more information.
|
||||
#=============================================================================
|
||||
# (To distribute this file outside of CMake, substitute the full
|
||||
# License text for the above reference.)
|
||||
|
||||
|
||||
if(__CMAKE_PARSE_ARGUMENTS_INCLUDED)
|
||||
return()
|
||||
endif()
|
||||
set(__CMAKE_PARSE_ARGUMENTS_INCLUDED TRUE)
|
||||
|
||||
|
||||
function(CMAKE_PARSE_ARGUMENTS prefix _optionNames _singleArgNames _multiArgNames)
|
||||
# first set all result variables to empty/FALSE
|
||||
foreach(arg_name ${_singleArgNames} ${_multiArgNames})
|
||||
set(${prefix}_${arg_name})
|
||||
endforeach()
|
||||
|
||||
foreach(option ${_optionNames})
|
||||
set(${prefix}_${option} FALSE)
|
||||
endforeach()
|
||||
|
||||
set(${prefix}_UNPARSED_ARGUMENTS)
|
||||
|
||||
set(insideValues FALSE)
|
||||
set(currentArgName)
|
||||
|
||||
# now iterate over all arguments and fill the result variables
|
||||
foreach(currentArg ${ARGN})
|
||||
list(FIND _optionNames "${currentArg}" optionIndex) # ... then this marks the end of the arguments belonging to this keyword
|
||||
list(FIND _singleArgNames "${currentArg}" singleArgIndex) # ... then this marks the end of the arguments belonging to this keyword
|
||||
list(FIND _multiArgNames "${currentArg}" multiArgIndex) # ... then this marks the end of the arguments belonging to this keyword
|
||||
|
||||
if(${optionIndex} EQUAL -1 AND ${singleArgIndex} EQUAL -1 AND ${multiArgIndex} EQUAL -1)
|
||||
if(insideValues)
|
||||
if("${insideValues}" STREQUAL "SINGLE")
|
||||
set(${prefix}_${currentArgName} ${currentArg})
|
||||
set(insideValues FALSE)
|
||||
elseif("${insideValues}" STREQUAL "MULTI")
|
||||
list(APPEND ${prefix}_${currentArgName} ${currentArg})
|
||||
endif()
|
||||
else()
|
||||
list(APPEND ${prefix}_UNPARSED_ARGUMENTS ${currentArg})
|
||||
endif()
|
||||
else()
|
||||
if(NOT ${optionIndex} EQUAL -1)
|
||||
set(${prefix}_${currentArg} TRUE)
|
||||
set(insideValues FALSE)
|
||||
elseif(NOT ${singleArgIndex} EQUAL -1)
|
||||
set(currentArgName ${currentArg})
|
||||
set(${prefix}_${currentArgName})
|
||||
set(insideValues "SINGLE")
|
||||
elseif(NOT ${multiArgIndex} EQUAL -1)
|
||||
set(currentArgName ${currentArg})
|
||||
set(${prefix}_${currentArgName})
|
||||
set(insideValues "MULTI")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endforeach()
|
||||
|
||||
# propagate the result variables to the caller:
|
||||
foreach(arg_name ${_singleArgNames} ${_multiArgNames} ${_optionNames})
|
||||
set(${prefix}_${arg_name} ${${prefix}_${arg_name}} PARENT_SCOPE)
|
||||
endforeach()
|
||||
set(${prefix}_UNPARSED_ARGUMENTS ${${prefix}_UNPARSED_ARGUMENTS} PARENT_SCOPE)
|
||||
|
||||
endfunction()
|
108  Godeps/_workspace/src/github.com/ethereum/ethash/cmake/modules/FindCryptoPP.cmake  generated  vendored
@@ -1,108 +0,0 @@
|
||||
# Module for locating the Crypto++ encryption library.
|
||||
#
|
||||
# Customizable variables:
|
||||
# CRYPTOPP_ROOT_DIR
|
||||
# This variable points to the CryptoPP root directory. On Windows the
|
||||
# library location typically will have to be provided explicitly using the
|
||||
# -D command-line option. The directory should include the include/cryptopp,
|
||||
# lib and/or bin sub-directories.
|
||||
#
|
||||
# Read-only variables:
|
||||
# CRYPTOPP_FOUND
|
||||
# Indicates whether the library has been found.
|
||||
#
|
||||
# CRYPTOPP_INCLUDE_DIRS
|
||||
# Points to the CryptoPP include directory.
|
||||
#
|
||||
# CRYPTOPP_LIBRARIES
|
||||
# Points to the CryptoPP libraries that should be passed to
|
||||
# target_link_libraries.
|
||||
#
|
||||
#
|
||||
# Copyright (c) 2012 Sergiu Dotenco
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
INCLUDE (FindPackageHandleStandardArgs)
|
||||
|
||||
FIND_PATH (CRYPTOPP_ROOT_DIR
|
||||
NAMES cryptopp/cryptlib.h include/cryptopp/cryptlib.h
|
||||
PATHS ENV CRYPTOPPROOT
|
||||
DOC "CryptoPP root directory")
|
||||
|
||||
# Re-use the previous path:
|
||||
FIND_PATH (CRYPTOPP_INCLUDE_DIR
|
||||
NAMES cryptopp/cryptlib.h
|
||||
HINTS ${CRYPTOPP_ROOT_DIR}
|
||||
PATH_SUFFIXES include
|
||||
DOC "CryptoPP include directory")
|
||||
|
||||
FIND_LIBRARY (CRYPTOPP_LIBRARY_DEBUG
|
||||
NAMES cryptlibd cryptoppd
|
||||
HINTS ${CRYPTOPP_ROOT_DIR}
|
||||
PATH_SUFFIXES lib
|
||||
DOC "CryptoPP debug library")
|
||||
|
||||
FIND_LIBRARY (CRYPTOPP_LIBRARY_RELEASE
|
||||
NAMES cryptlib cryptopp
|
||||
HINTS ${CRYPTOPP_ROOT_DIR}
|
||||
PATH_SUFFIXES lib
|
||||
DOC "CryptoPP release library")
|
||||
|
||||
IF (CRYPTOPP_LIBRARY_DEBUG AND CRYPTOPP_LIBRARY_RELEASE)
|
||||
SET (CRYPTOPP_LIBRARY
|
||||
optimized ${CRYPTOPP_LIBRARY_RELEASE}
|
||||
debug ${CRYPTOPP_LIBRARY_DEBUG} CACHE DOC "CryptoPP library")
|
||||
ELSEIF (CRYPTOPP_LIBRARY_RELEASE)
|
||||
SET (CRYPTOPP_LIBRARY ${CRYPTOPP_LIBRARY_RELEASE} CACHE DOC
|
||||
"CryptoPP library")
|
||||
ENDIF (CRYPTOPP_LIBRARY_DEBUG AND CRYPTOPP_LIBRARY_RELEASE)
|
||||
|
||||
IF (CRYPTOPP_INCLUDE_DIR)
|
||||
SET (_CRYPTOPP_VERSION_HEADER ${CRYPTOPP_INCLUDE_DIR}/cryptopp/config.h)
|
||||
|
||||
IF (EXISTS ${_CRYPTOPP_VERSION_HEADER})
|
||||
FILE (STRINGS ${_CRYPTOPP_VERSION_HEADER} _CRYPTOPP_VERSION_TMP REGEX
|
||||
"^#define CRYPTOPP_VERSION[ \t]+[0-9]+$")
|
||||
|
||||
STRING (REGEX REPLACE
|
||||
"^#define CRYPTOPP_VERSION[ \t]+([0-9]+)" "\\1" _CRYPTOPP_VERSION_TMP
|
||||
${_CRYPTOPP_VERSION_TMP})
|
||||
|
||||
STRING (REGEX REPLACE "([0-9]+)[0-9][0-9]" "\\1" CRYPTOPP_VERSION_MAJOR
|
||||
${_CRYPTOPP_VERSION_TMP})
|
||||
STRING (REGEX REPLACE "[0-9]([0-9])[0-9]" "\\1" CRYPTOPP_VERSION_MINOR
|
||||
${_CRYPTOPP_VERSION_TMP})
|
||||
STRING (REGEX REPLACE "[0-9][0-9]([0-9])" "\\1" CRYPTOPP_VERSION_PATCH
|
||||
${_CRYPTOPP_VERSION_TMP})
|
||||
|
||||
SET (CRYPTOPP_VERSION_COUNT 3)
|
||||
SET (CRYPTOPP_VERSION
|
||||
${CRYPTOPP_VERSION_MAJOR}.${CRYPTOPP_VERSION_MINOR}.${CRYPTOPP_VERSION_PATCH})
|
||||
ENDIF (EXISTS ${_CRYPTOPP_VERSION_HEADER})
|
||||
ENDIF (CRYPTOPP_INCLUDE_DIR)
|
||||
|
||||
SET (CRYPTOPP_INCLUDE_DIRS ${CRYPTOPP_INCLUDE_DIR})
|
||||
SET (CRYPTOPP_LIBRARIES ${CRYPTOPP_LIBRARY})
|
||||
|
||||
MARK_AS_ADVANCED (CRYPTOPP_INCLUDE_DIR CRYPTOPP_LIBRARY CRYPTOPP_LIBRARY_DEBUG
|
||||
CRYPTOPP_LIBRARY_RELEASE)
|
||||
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS (CryptoPP REQUIRED_VARS CRYPTOPP_ROOT_DIR
|
||||
CRYPTOPP_INCLUDE_DIR CRYPTOPP_LIBRARY VERSION_VAR CRYPTOPP_VERSION)
|
148  Godeps/_workspace/src/github.com/ethereum/ethash/cmake/modules/FindOpenCL.cmake  generated  vendored
@@ -1,148 +0,0 @@
|
||||
#.rst:
|
||||
# FindOpenCL
|
||||
# ----------
|
||||
#
|
||||
# Try to find OpenCL
|
||||
#
|
||||
# Once done this will define::
|
||||
#
|
||||
# OpenCL_FOUND - True if OpenCL was found
|
||||
# OpenCL_INCLUDE_DIRS - include directories for OpenCL
|
||||
# OpenCL_LIBRARIES - link against this library to use OpenCL
|
||||
# OpenCL_VERSION_STRING - Highest supported OpenCL version (eg. 1.2)
|
||||
# OpenCL_VERSION_MAJOR - The major version of the OpenCL implementation
|
||||
# OpenCL_VERSION_MINOR - The minor version of the OpenCL implementation
|
||||
#
|
||||
# The module will also define two cache variables::
|
||||
#
|
||||
# OpenCL_INCLUDE_DIR - the OpenCL include directory
|
||||
# OpenCL_LIBRARY - the path to the OpenCL library
|
||||
#
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2014 Matthaeus G. Chajdas
|
||||
#
|
||||
# Distributed under the OSI-approved BSD License (the "License");
|
||||
# see accompanying file Copyright.txt for details.
|
||||
#
|
||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the License for more information.
|
||||
#=============================================================================
|
||||
# (To distribute this file outside of CMake, substitute the full
|
||||
# License text for the above reference.)
|
||||
|
||||
function(_FIND_OPENCL_VERSION)
|
||||
include(CheckSymbolExists)
|
||||
include(CMakePushCheckState)
|
||||
set(CMAKE_REQUIRED_QUIET ${OpenCL_FIND_QUIETLY})
|
||||
|
||||
CMAKE_PUSH_CHECK_STATE()
|
||||
foreach(VERSION "2_0" "1_2" "1_1" "1_0")
|
||||
set(CMAKE_REQUIRED_INCLUDES "${OpenCL_INCLUDE_DIR}")
|
||||
|
||||
if(APPLE)
|
||||
CHECK_SYMBOL_EXISTS(
|
||||
CL_VERSION_${VERSION}
|
||||
"${OpenCL_INCLUDE_DIR}/OpenCL/cl.h"
|
||||
OPENCL_VERSION_${VERSION})
|
||||
else()
|
||||
CHECK_SYMBOL_EXISTS(
|
||||
CL_VERSION_${VERSION}
|
||||
"${OpenCL_INCLUDE_DIR}/CL/cl.h"
|
||||
OPENCL_VERSION_${VERSION})
|
||||
endif()
|
||||
|
||||
if(OPENCL_VERSION_${VERSION})
|
||||
string(REPLACE "_" "." VERSION "${VERSION}")
|
||||
set(OpenCL_VERSION_STRING ${VERSION} PARENT_SCOPE)
|
||||
string(REGEX MATCHALL "[0-9]+" version_components "${VERSION}")
|
||||
list(GET version_components 0 major_version)
|
||||
list(GET version_components 1 minor_version)
|
||||
set(OpenCL_VERSION_MAJOR ${major_version} PARENT_SCOPE)
|
||||
set(OpenCL_VERSION_MINOR ${minor_version} PARENT_SCOPE)
|
||||
break()
|
||||
endif()
|
||||
endforeach()
|
||||
CMAKE_POP_CHECK_STATE()
|
||||
endfunction()
|
||||
|
||||
find_path(OpenCL_INCLUDE_DIR
|
||||
NAMES
|
||||
CL/cl.h OpenCL/cl.h
|
||||
PATHS
|
||||
ENV "PROGRAMFILES(X86)"
|
||||
ENV AMDAPPSDKROOT
|
||||
ENV INTELOCLSDKROOT
|
||||
ENV NVSDKCOMPUTE_ROOT
|
||||
ENV CUDA_PATH
|
||||
ENV ATISTREAMSDKROOT
|
||||
PATH_SUFFIXES
|
||||
include
|
||||
OpenCL/common/inc
|
||||
"AMD APP/include")
|
||||
|
||||
_FIND_OPENCL_VERSION()
|
||||
|
||||
if(WIN32)
|
||||
if(CMAKE_SIZEOF_VOID_P EQUAL 4)
|
||||
find_library(OpenCL_LIBRARY
|
||||
NAMES OpenCL
|
||||
PATHS
|
||||
ENV "PROGRAMFILES(X86)"
|
||||
ENV AMDAPPSDKROOT
|
||||
ENV INTELOCLSDKROOT
|
||||
ENV CUDA_PATH
|
||||
ENV NVSDKCOMPUTE_ROOT
|
||||
ENV ATISTREAMSDKROOT
|
||||
PATH_SUFFIXES
|
||||
"AMD APP/lib/x86"
|
||||
lib/x86
|
||||
lib/Win32
|
||||
OpenCL/common/lib/Win32)
|
||||
elseif(CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
find_library(OpenCL_LIBRARY
|
||||
NAMES OpenCL
|
||||
PATHS
|
||||
ENV "PROGRAMFILES(X86)"
|
||||
ENV AMDAPPSDKROOT
|
||||
ENV INTELOCLSDKROOT
|
||||
ENV CUDA_PATH
|
||||
ENV NVSDKCOMPUTE_ROOT
|
||||
ENV ATISTREAMSDKROOT
|
||||
PATH_SUFFIXES
|
||||
"AMD APP/lib/x86_64"
|
||||
lib/x86_64
|
||||
lib/x64
|
||||
OpenCL/common/lib/x64)
|
||||
endif()
|
||||
else()
|
||||
find_library(OpenCL_LIBRARY
|
||||
NAMES OpenCL
|
||||
PATHS
|
||||
ENV "PROGRAMFILES(X86)"
|
||||
ENV AMDAPPSDKROOT
|
||||
ENV INTELOCLSDKROOT
|
||||
ENV CUDA_PATH
|
||||
ENV NVSDKCOMPUTE_ROOT
|
||||
ENV ATISTREAMSDKROOT
|
||||
PATH_SUFFIXES
|
||||
"AMD APP/lib/x86_64"
|
||||
lib/x86_64
|
||||
lib/x64
|
||||
OpenCL/common/lib/x64)
|
||||
endif()
|
||||
|
||||
set(OpenCL_LIBRARIES ${OpenCL_LIBRARY})
|
||||
set(OpenCL_INCLUDE_DIRS ${OpenCL_INCLUDE_DIR})
|
||||
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake)
|
||||
find_package_handle_standard_args(
|
||||
OpenCL
|
||||
FOUND_VAR OpenCL_FOUND
|
||||
REQUIRED_VARS OpenCL_LIBRARY OpenCL_INCLUDE_DIR
|
||||
VERSION_VAR OpenCL_VERSION_STRING)
|
||||
|
||||
mark_as_advanced(
|
||||
OpenCL_INCLUDE_DIR
|
||||
OpenCL_LIBRARY)
|
Godeps/_workspace/src/github.com/ethereum/ethash/cmake/modules/FindPackageHandleStandardArgs.cmake
@@ -1,382 +0,0 @@
|
||||
#.rst:
|
||||
# FindPackageHandleStandardArgs
|
||||
# -----------------------------
|
||||
#
|
||||
#
|
||||
#
|
||||
# FIND_PACKAGE_HANDLE_STANDARD_ARGS(<name> ... )
|
||||
#
|
||||
# This function is intended to be used in FindXXX.cmake modules files.
|
||||
# It handles the REQUIRED, QUIET and version-related arguments to
|
||||
# find_package(). It also sets the <packagename>_FOUND variable. The
|
||||
# package is considered found if all variables <var1>... listed contain
|
||||
# valid results, e.g. valid filepaths.
|
||||
#
|
||||
# There are two modes of this function. The first argument in both
|
||||
# modes is the name of the Find-module where it is called (in original
|
||||
# casing).
|
||||
#
|
||||
# The first simple mode looks like this:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# FIND_PACKAGE_HANDLE_STANDARD_ARGS(<name>
|
||||
# (DEFAULT_MSG|"Custom failure message") <var1>...<varN> )
|
||||
#
|
||||
# If the variables <var1> to <varN> are all valid, then
|
||||
# <UPPERCASED_NAME>_FOUND will be set to TRUE. If DEFAULT_MSG is given
|
||||
# as second argument, then the function will itself generate useful
|
||||
# success and error messages. You can also supply a custom error
|
||||
# message for the failure case. This is not recommended.
|
||||
#
|
||||
# The second mode is more powerful and also supports version checking:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# FIND_PACKAGE_HANDLE_STANDARD_ARGS(NAME
|
||||
# [FOUND_VAR <resultVar>]
|
||||
# [REQUIRED_VARS <var1>...<varN>]
|
||||
# [VERSION_VAR <versionvar>]
|
||||
# [HANDLE_COMPONENTS]
|
||||
# [CONFIG_MODE]
|
||||
# [FAIL_MESSAGE "Custom failure message"] )
|
||||
#
|
||||
# In this mode, the name of the result-variable can be set either to
|
||||
# either <UPPERCASED_NAME>_FOUND or <OriginalCase_Name>_FOUND using the
|
||||
# FOUND_VAR option. Other names for the result-variable are not
|
||||
# allowed. So for a Find-module named FindFooBar.cmake, the two
|
||||
# possible names are FooBar_FOUND and FOOBAR_FOUND. It is recommended
|
||||
# to use the original case version. If the FOUND_VAR option is not
|
||||
# used, the default is <UPPERCASED_NAME>_FOUND.
|
||||
#
|
||||
# As in the simple mode, if <var1> through <varN> are all valid,
|
||||
# <packagename>_FOUND will be set to TRUE. After REQUIRED_VARS the
|
||||
# variables which are required for this package are listed. Following
|
||||
# VERSION_VAR the name of the variable can be specified which holds the
|
||||
# version of the package which has been found. If this is done, this
|
||||
# version will be checked against the (potentially) specified required
|
||||
# version used in the find_package() call. The EXACT keyword is also
|
||||
# handled. The default messages include information about the required
|
||||
# version and the version which has actually been found, whether or not
# the version is ok. If the package supports components, use the
|
||||
# HANDLE_COMPONENTS option to enable handling them. In this case,
|
||||
# find_package_handle_standard_args() will report which components have
|
||||
# been found and which are missing, and the <packagename>_FOUND variable
|
||||
# will be set to FALSE if any of the required components (i.e. not the
|
||||
# ones listed after OPTIONAL_COMPONENTS) are missing. Use the option
|
||||
# CONFIG_MODE if your FindXXX.cmake module is a wrapper for a
|
||||
# find_package(... NO_MODULE) call. In this case VERSION_VAR will be
|
||||
# set to <NAME>_VERSION and the macro will automatically check whether
|
||||
# the Config module was found. Via FAIL_MESSAGE a custom failure
|
||||
# message can be specified, if this is not used, the default message
|
||||
# will be displayed.
|
||||
#
|
||||
# Example for mode 1:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# find_package_handle_standard_args(LibXml2 DEFAULT_MSG
|
||||
# LIBXML2_LIBRARY LIBXML2_INCLUDE_DIR)
|
||||
#
|
||||
#
|
||||
#
|
||||
# LibXml2 is considered to be found, if both LIBXML2_LIBRARY and
|
||||
# LIBXML2_INCLUDE_DIR are valid. Then also LIBXML2_FOUND is set to
|
||||
# TRUE. If it is not found and REQUIRED was used, it fails with
|
||||
# FATAL_ERROR, independent whether QUIET was used or not. If it is
|
||||
# found, success will be reported, including the content of <var1>. On
|
||||
# repeated Cmake runs, the same message won't be printed again.
|
||||
#
|
||||
# Example for mode 2:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# find_package_handle_standard_args(LibXslt
|
||||
# FOUND_VAR LibXslt_FOUND
|
||||
# REQUIRED_VARS LibXslt_LIBRARIES LibXslt_INCLUDE_DIRS
|
||||
# VERSION_VAR LibXslt_VERSION_STRING)
|
||||
#
|
||||
# In this case, LibXslt is considered to be found if the variable(s)
|
||||
# listed after REQUIRED_VAR are all valid, i.e. LibXslt_LIBRARIES and
|
||||
# LibXslt_INCLUDE_DIRS in this case. The result will then be stored in
|
||||
# LibXslt_FOUND . Also the version of LibXslt will be checked by using
|
||||
# the version contained in LibXslt_VERSION_STRING. Since no
|
||||
# FAIL_MESSAGE is given, the default messages will be printed.
|
||||
#
|
||||
# Another example for mode 2:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# find_package(Automoc4 QUIET NO_MODULE HINTS /opt/automoc4)
|
||||
# find_package_handle_standard_args(Automoc4 CONFIG_MODE)
|
||||
#
|
||||
# In this case, FindAutomoc4.cmake wraps a call to find_package(Automoc4
|
||||
# NO_MODULE) and adds an additional search directory for automoc4. Here
|
||||
# the result will be stored in AUTOMOC4_FOUND. The following
|
||||
# FIND_PACKAGE_HANDLE_STANDARD_ARGS() call produces a proper
|
||||
# success/error message.
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2007-2009 Kitware, Inc.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD License (the "License");
|
||||
# see accompanying file Copyright.txt for details.
|
||||
#
|
||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the License for more information.
|
||||
#=============================================================================
|
||||
# (To distribute this file outside of CMake, substitute the full
|
||||
# License text for the above reference.)
|
||||
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/FindPackageMessage.cmake)
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/CMakeParseArguments.cmake)
|
||||
|
||||
# internal helper macro
|
||||
macro(_FPHSA_FAILURE_MESSAGE _msg)
|
||||
if (${_NAME}_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "${_msg}")
|
||||
else ()
|
||||
if (NOT ${_NAME}_FIND_QUIETLY)
|
||||
message(STATUS "${_msg}")
|
||||
endif ()
|
||||
endif ()
|
||||
endmacro()
|
||||
|
||||
|
||||
# internal helper macro to generate the failure message when used in CONFIG_MODE:
|
||||
macro(_FPHSA_HANDLE_FAILURE_CONFIG_MODE)
|
||||
# <name>_CONFIG is set, but FOUND is false, this means that some other of the REQUIRED_VARS was not found:
|
||||
if(${_NAME}_CONFIG)
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: missing: ${MISSING_VARS} (found ${${_NAME}_CONFIG} ${VERSION_MSG})")
|
||||
else()
|
||||
# If _CONSIDERED_CONFIGS is set, the config-file has been found, but no suitable version.
|
||||
# List them all in the error message:
|
||||
if(${_NAME}_CONSIDERED_CONFIGS)
|
||||
set(configsText "")
|
||||
list(LENGTH ${_NAME}_CONSIDERED_CONFIGS configsCount)
|
||||
math(EXPR configsCount "${configsCount} - 1")
|
||||
foreach(currentConfigIndex RANGE ${configsCount})
|
||||
list(GET ${_NAME}_CONSIDERED_CONFIGS ${currentConfigIndex} filename)
|
||||
list(GET ${_NAME}_CONSIDERED_VERSIONS ${currentConfigIndex} version)
|
||||
set(configsText "${configsText} ${filename} (version ${version})\n")
|
||||
endforeach()
|
||||
if (${_NAME}_NOT_FOUND_MESSAGE)
|
||||
set(configsText "${configsText} Reason given by package: ${${_NAME}_NOT_FOUND_MESSAGE}\n")
|
||||
endif()
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} ${VERSION_MSG}, checked the following files:\n${configsText}")
|
||||
|
||||
else()
|
||||
# Simple case: No Config-file was found at all:
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: found neither ${_NAME}Config.cmake nor ${_NAME_LOWER}-config.cmake ${VERSION_MSG}")
|
||||
endif()
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
|
||||
function(FIND_PACKAGE_HANDLE_STANDARD_ARGS _NAME _FIRST_ARG)
|
||||
|
||||
# set up the arguments for CMAKE_PARSE_ARGUMENTS and check whether we are in
|
||||
# new extended or in the "old" mode:
|
||||
set(options CONFIG_MODE HANDLE_COMPONENTS)
|
||||
set(oneValueArgs FAIL_MESSAGE VERSION_VAR FOUND_VAR)
|
||||
set(multiValueArgs REQUIRED_VARS)
|
||||
set(_KEYWORDS_FOR_EXTENDED_MODE ${options} ${oneValueArgs} ${multiValueArgs} )
|
||||
list(FIND _KEYWORDS_FOR_EXTENDED_MODE "${_FIRST_ARG}" INDEX)
|
||||
|
||||
if(${INDEX} EQUAL -1)
|
||||
set(FPHSA_FAIL_MESSAGE ${_FIRST_ARG})
|
||||
set(FPHSA_REQUIRED_VARS ${ARGN})
|
||||
set(FPHSA_VERSION_VAR)
|
||||
else()
|
||||
|
||||
CMAKE_PARSE_ARGUMENTS(FPHSA "${options}" "${oneValueArgs}" "${multiValueArgs}" ${_FIRST_ARG} ${ARGN})
|
||||
|
||||
if(FPHSA_UNPARSED_ARGUMENTS)
|
||||
message(FATAL_ERROR "Unknown keywords given to FIND_PACKAGE_HANDLE_STANDARD_ARGS(): \"${FPHSA_UNPARSED_ARGUMENTS}\"")
|
||||
endif()
|
||||
|
||||
if(NOT FPHSA_FAIL_MESSAGE)
|
||||
set(FPHSA_FAIL_MESSAGE "DEFAULT_MSG")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# now that we collected all arguments, process them
|
||||
|
||||
if("x${FPHSA_FAIL_MESSAGE}" STREQUAL "xDEFAULT_MSG")
|
||||
set(FPHSA_FAIL_MESSAGE "Could NOT find ${_NAME}")
|
||||
endif()
|
||||
|
||||
# In config-mode, we rely on the variable <package>_CONFIG, which is set by find_package()
|
||||
# when it successfully found the config-file, including version checking:
|
||||
if(FPHSA_CONFIG_MODE)
|
||||
list(INSERT FPHSA_REQUIRED_VARS 0 ${_NAME}_CONFIG)
|
||||
list(REMOVE_DUPLICATES FPHSA_REQUIRED_VARS)
|
||||
set(FPHSA_VERSION_VAR ${_NAME}_VERSION)
|
||||
endif()
|
||||
|
||||
if(NOT FPHSA_REQUIRED_VARS)
|
||||
message(FATAL_ERROR "No REQUIRED_VARS specified for FIND_PACKAGE_HANDLE_STANDARD_ARGS()")
|
||||
endif()
|
||||
|
||||
list(GET FPHSA_REQUIRED_VARS 0 _FIRST_REQUIRED_VAR)
|
||||
|
||||
string(TOUPPER ${_NAME} _NAME_UPPER)
|
||||
string(TOLOWER ${_NAME} _NAME_LOWER)
|
||||
|
||||
if(FPHSA_FOUND_VAR)
|
||||
if(FPHSA_FOUND_VAR MATCHES "^${_NAME}_FOUND$" OR FPHSA_FOUND_VAR MATCHES "^${_NAME_UPPER}_FOUND$")
|
||||
set(_FOUND_VAR ${FPHSA_FOUND_VAR})
|
||||
else()
|
||||
message(FATAL_ERROR "The argument for FOUND_VAR is \"${FPHSA_FOUND_VAR}\", but only \"${_NAME}_FOUND\" and \"${_NAME_UPPER}_FOUND\" are valid names.")
|
||||
endif()
|
||||
else()
|
||||
set(_FOUND_VAR ${_NAME_UPPER}_FOUND)
|
||||
endif()
|
||||
|
||||
# collect all variables which were not found, so they can be printed, so the
|
||||
# user knows better what went wrong (#6375)
|
||||
set(MISSING_VARS "")
|
||||
set(DETAILS "")
|
||||
# check if all passed variables are valid
|
||||
unset(${_FOUND_VAR})
|
||||
foreach(_CURRENT_VAR ${FPHSA_REQUIRED_VARS})
|
||||
if(NOT ${_CURRENT_VAR})
|
||||
set(${_FOUND_VAR} FALSE)
|
||||
set(MISSING_VARS "${MISSING_VARS} ${_CURRENT_VAR}")
|
||||
else()
|
||||
set(DETAILS "${DETAILS}[${${_CURRENT_VAR}}]")
|
||||
endif()
|
||||
endforeach()
|
||||
if(NOT "${${_FOUND_VAR}}" STREQUAL "FALSE")
|
||||
set(${_FOUND_VAR} TRUE)
|
||||
endif()
|
||||
|
||||
# component handling
|
||||
unset(FOUND_COMPONENTS_MSG)
|
||||
unset(MISSING_COMPONENTS_MSG)
|
||||
|
||||
if(FPHSA_HANDLE_COMPONENTS)
|
||||
foreach(comp ${${_NAME}_FIND_COMPONENTS})
|
||||
if(${_NAME}_${comp}_FOUND)
|
||||
|
||||
if(NOT DEFINED FOUND_COMPONENTS_MSG)
|
||||
set(FOUND_COMPONENTS_MSG "found components: ")
|
||||
endif()
|
||||
set(FOUND_COMPONENTS_MSG "${FOUND_COMPONENTS_MSG} ${comp}")
|
||||
|
||||
else()
|
||||
|
||||
if(NOT DEFINED MISSING_COMPONENTS_MSG)
|
||||
set(MISSING_COMPONENTS_MSG "missing components: ")
|
||||
endif()
|
||||
set(MISSING_COMPONENTS_MSG "${MISSING_COMPONENTS_MSG} ${comp}")
|
||||
|
||||
if(${_NAME}_FIND_REQUIRED_${comp})
|
||||
set(${_FOUND_VAR} FALSE)
|
||||
set(MISSING_VARS "${MISSING_VARS} ${comp}")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
endforeach()
|
||||
set(COMPONENT_MSG "${FOUND_COMPONENTS_MSG} ${MISSING_COMPONENTS_MSG}")
|
||||
set(DETAILS "${DETAILS}[c${COMPONENT_MSG}]")
|
||||
endif()
|
||||
|
||||
# version handling:
|
||||
set(VERSION_MSG "")
|
||||
set(VERSION_OK TRUE)
|
||||
set(VERSION ${${FPHSA_VERSION_VAR}})
|
||||
|
||||
# check with DEFINED here as the requested or found version may be "0"
|
||||
if (DEFINED ${_NAME}_FIND_VERSION)
|
||||
if(DEFINED ${FPHSA_VERSION_VAR})
|
||||
|
||||
if(${_NAME}_FIND_VERSION_EXACT) # exact version required
|
||||
# count the dots in the version string
|
||||
string(REGEX REPLACE "[^.]" "" _VERSION_DOTS "${VERSION}")
|
||||
# add one dot because there is one dot more than there are components
|
||||
string(LENGTH "${_VERSION_DOTS}." _VERSION_DOTS)
|
||||
if (_VERSION_DOTS GREATER ${_NAME}_FIND_VERSION_COUNT)
|
||||
# Because of the C++ implementation of find_package() ${_NAME}_FIND_VERSION_COUNT
|
||||
# is at most 4 here. Therefore a simple lookup table is used.
|
||||
if (${_NAME}_FIND_VERSION_COUNT EQUAL 1)
|
||||
set(_VERSION_REGEX "[^.]*")
|
||||
elseif (${_NAME}_FIND_VERSION_COUNT EQUAL 2)
|
||||
set(_VERSION_REGEX "[^.]*\\.[^.]*")
|
||||
elseif (${_NAME}_FIND_VERSION_COUNT EQUAL 3)
|
||||
set(_VERSION_REGEX "[^.]*\\.[^.]*\\.[^.]*")
|
||||
else ()
|
||||
set(_VERSION_REGEX "[^.]*\\.[^.]*\\.[^.]*\\.[^.]*")
|
||||
endif ()
|
||||
string(REGEX REPLACE "^(${_VERSION_REGEX})\\..*" "\\1" _VERSION_HEAD "${VERSION}")
|
||||
unset(_VERSION_REGEX)
|
||||
if (NOT ${_NAME}_FIND_VERSION VERSION_EQUAL _VERSION_HEAD)
|
||||
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is exact version \"${${_NAME}_FIND_VERSION}\"")
|
||||
set(VERSION_OK FALSE)
|
||||
else ()
|
||||
set(VERSION_MSG "(found suitable exact version \"${VERSION}\")")
|
||||
endif ()
|
||||
unset(_VERSION_HEAD)
|
||||
else ()
|
||||
if (NOT "${${_NAME}_FIND_VERSION}" VERSION_EQUAL "${VERSION}")
|
||||
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is exact version \"${${_NAME}_FIND_VERSION}\"")
|
||||
set(VERSION_OK FALSE)
|
||||
else ()
|
||||
set(VERSION_MSG "(found suitable exact version \"${VERSION}\")")
|
||||
endif ()
|
||||
endif ()
|
||||
unset(_VERSION_DOTS)
|
||||
|
||||
else() # minimum version specified:
|
||||
if ("${${_NAME}_FIND_VERSION}" VERSION_GREATER "${VERSION}")
|
||||
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is at least \"${${_NAME}_FIND_VERSION}\"")
|
||||
set(VERSION_OK FALSE)
|
||||
else ()
|
||||
set(VERSION_MSG "(found suitable version \"${VERSION}\", minimum required is \"${${_NAME}_FIND_VERSION}\")")
|
||||
endif ()
|
||||
endif()
|
||||
|
||||
else()
|
||||
|
||||
# if the package was not found, but a version was given, add that to the output:
|
||||
if(${_NAME}_FIND_VERSION_EXACT)
|
||||
set(VERSION_MSG "(Required is exact version \"${${_NAME}_FIND_VERSION}\")")
|
||||
else()
|
||||
set(VERSION_MSG "(Required is at least version \"${${_NAME}_FIND_VERSION}\")")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
else ()
|
||||
if(VERSION)
|
||||
set(VERSION_MSG "(found version \"${VERSION}\")")
|
||||
endif()
|
||||
endif ()
|
||||
|
||||
if(VERSION_OK)
|
||||
set(DETAILS "${DETAILS}[v${VERSION}(${${_NAME}_FIND_VERSION})]")
|
||||
else()
|
||||
set(${_FOUND_VAR} FALSE)
|
||||
endif()
|
||||
|
||||
|
||||
# print the result:
|
||||
if (${_FOUND_VAR})
|
||||
FIND_PACKAGE_MESSAGE(${_NAME} "Found ${_NAME}: ${${_FIRST_REQUIRED_VAR}} ${VERSION_MSG} ${COMPONENT_MSG}" "${DETAILS}")
|
||||
else ()
|
||||
|
||||
if(FPHSA_CONFIG_MODE)
|
||||
_FPHSA_HANDLE_FAILURE_CONFIG_MODE()
|
||||
else()
|
||||
if(NOT VERSION_OK)
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: ${VERSION_MSG} (found ${${_FIRST_REQUIRED_VAR}})")
|
||||
else()
|
||||
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} (missing: ${MISSING_VARS}) ${VERSION_MSG}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endif ()
|
||||
|
||||
set(${_FOUND_VAR} ${${_FOUND_VAR}} PARENT_SCOPE)
|
||||
|
||||
endfunction()
|
57  Godeps/_workspace/src/github.com/ethereum/ethash/cmake/modules/FindPackageMessage.cmake  generated  vendored
@@ -1,57 +0,0 @@
|
||||
#.rst:
|
||||
# FindPackageMessage
|
||||
# ------------------
|
||||
#
|
||||
#
|
||||
#
|
||||
# FIND_PACKAGE_MESSAGE(<name> "message for user" "find result details")
|
||||
#
|
||||
# This macro is intended to be used in FindXXX.cmake modules files. It
|
||||
# will print a message once for each unique find result. This is useful
|
||||
# for telling the user where a package was found. The first argument
|
||||
# specifies the name (XXX) of the package. The second argument
|
||||
# specifies the message to display. The third argument lists details
|
||||
# about the find result so that if they change the message will be
|
||||
# displayed again. The macro also obeys the QUIET argument to the
|
||||
# find_package command.
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# ::
|
||||
#
|
||||
# if(X11_FOUND)
|
||||
# FIND_PACKAGE_MESSAGE(X11 "Found X11: ${X11_X11_LIB}"
|
||||
# "[${X11_X11_LIB}][${X11_INCLUDE_DIR}]")
|
||||
# else()
|
||||
# ...
|
||||
# endif()
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2008-2009 Kitware, Inc.
|
||||
#
|
||||
# Distributed under the OSI-approved BSD License (the "License");
|
||||
# see accompanying file Copyright.txt for details.
|
||||
#
|
||||
# This software is distributed WITHOUT ANY WARRANTY; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the License for more information.
|
||||
#=============================================================================
|
||||
# (To distribute this file outside of CMake, substitute the full
|
||||
# License text for the above reference.)
|
||||
|
||||
function(FIND_PACKAGE_MESSAGE pkg msg details)
|
||||
# Avoid printing a message repeatedly for the same find result.
|
||||
if(NOT ${pkg}_FIND_QUIETLY)
|
||||
string(REPLACE "\n" "" details "${details}")
|
||||
set(DETAILS_VAR FIND_PACKAGE_MESSAGE_DETAILS_${pkg})
|
||||
if(NOT "${details}" STREQUAL "${${DETAILS_VAR}}")
|
||||
# The message has not yet been printed.
|
||||
message(STATUS "${msg}")
|
||||
|
||||
# Save the find details in the cache to avoid printing the same
|
||||
# message again.
|
||||
set("${DETAILS_VAR}" "${details}"
|
||||
CACHE INTERNAL "Details about finding ${pkg}")
|
||||
endif()
|
||||
endif()
|
||||
endfunction()
|
13  Godeps/_workspace/src/github.com/ethereum/ethash/cryptopp/CMakeLists.txt  generated  vendored
@@ -1,13 +0,0 @@
|
||||
set(LIBRARY cryptopp)
|
||||
|
||||
include_directories(../../cryptopp)
|
||||
|
||||
# todo, subset
|
||||
file(GLOB HEADERS "../../cryptopp/*.h")
|
||||
file(GLOB SOURCE "../../cryptopp/*.cpp")
|
||||
|
||||
add_library(${LIBRARY} ${HEADERS} ${SOURCE})
|
||||
|
||||
set(CRYPTOPP_INCLUDE_DIRS "../.." "../../../" PARENT_SCOPE)
|
||||
set(CRYPTOPP_LIBRARIES ${LIBRARY} PARENT_SCOPE)
|
||||
set(CRYPTOPP_FOUND TRUE PARENT_SCOPE)
|
95  Godeps/_workspace/src/github.com/ethereum/ethash/ethash.go  generated  vendored
@@ -75,6 +75,7 @@ func defaultDir() string {
|
||||
// and automatic memory management.
|
||||
type cache struct {
|
||||
epoch uint64
|
||||
used time.Time
|
||||
test bool
|
||||
|
||||
gen sync.Once // ensures cache is only generated once.
|
||||
@@ -104,14 +105,25 @@ func freeCache(cache *cache) {
|
||||
cache.ptr = nil
|
||||
}
|
||||
|
||||
// Light implements the Verify half of the proof of work.
|
||||
// It uses a small in-memory cache to verify the nonces
|
||||
// found by Full.
|
||||
func (cache *cache) compute(dagSize uint64, hash common.Hash, nonce uint64) (ok bool, mixDigest, result common.Hash) {
|
||||
ret := C.ethash_light_compute_internal(cache.ptr, C.uint64_t(dagSize), hashToH256(hash), C.uint64_t(nonce))
|
||||
// Make sure cache is live until after the C call.
|
||||
// This is important because a GC might happen and execute
|
||||
// the finalizer before the call completes.
|
||||
_ = cache
|
||||
return bool(ret.success), h256ToHash(ret.mix_hash), h256ToHash(ret.result)
|
||||
}
|
||||
|
||||
// Light implements the Verify half of the proof of work. It uses a few small
|
||||
// in-memory caches to verify the nonces found by Full.
|
||||
type Light struct {
|
||||
test bool // if set use a smaller cache size
|
||||
mu sync.Mutex // protects current
|
||||
current *cache // last cache which was generated.
|
||||
// TODO: keep multiple caches.
|
||||
test bool // If set, use a smaller cache size
|
||||
|
||||
mu sync.Mutex // Protects the per-epoch map of verification caches
|
||||
caches map[uint64]*cache // Currently maintained verification caches
|
||||
future *cache // Pre-generated cache for the estimated future DAG
|
||||
|
||||
NumCaches int // Maximum number of caches to keep before eviction (only init, don't modify)
|
||||
}
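A hedged in-package sketch (the helper name is hypothetical) of how the reworked Light is meant to be driven: NumCaches is only honoured if set before first use, and a zero value falls back to the default of 3 inside getCache below.

func verifyWithLight(block pow.Block) bool {
	// Keep up to five per-epoch verification caches instead of the default three.
	l := &Light{NumCaches: 5}
	return l.Verify(block)
}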
|
||||
|
||||
// Verify checks whether the block's nonce is valid.
|
||||
@@ -137,29 +149,23 @@ func (l *Light) Verify(block pow.Block) bool {
|
||||
|
||||
cache := l.getCache(blockNum)
|
||||
dagSize := C.ethash_get_datasize(C.uint64_t(blockNum))
|
||||
|
||||
if l.test {
|
||||
dagSize = dagSizeForTesting
|
||||
}
|
||||
// Recompute the hash using the cache.
|
||||
hash := hashToH256(block.HashNoNonce())
|
||||
ret := C.ethash_light_compute_internal(cache.ptr, dagSize, hash, C.uint64_t(block.Nonce()))
|
||||
if !ret.success {
|
||||
ok, mixDigest, result := cache.compute(uint64(dagSize), block.HashNoNonce(), block.Nonce())
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
// avoid mixdigest malleability as it's not included in a block's "hashNoNonce"
|
||||
if block.MixDigest() != h256ToHash(ret.mix_hash) {
|
||||
if block.MixDigest() != mixDigest {
|
||||
return false
|
||||
}
|
||||
|
||||
// Make sure cache is live until after the C call.
|
||||
// This is important because a GC might happen and execute
|
||||
// the finalizer before the call completes.
|
||||
_ = cache
|
||||
// The actual check.
|
||||
target := new(big.Int).Div(maxUint256, difficulty)
|
||||
return h256ToHash(ret.result).Big().Cmp(target) <= 0
|
||||
return result.Big().Cmp(target) <= 0
|
||||
}
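The closing comparison is the whole proof-of-work criterion: the hash result, read as a 256-bit integer, must not exceed maxUint256 divided by the difficulty, so doubling the difficulty halves the acceptable range. A hedged restatement as a stand-alone helper (the name is hypothetical):

func meetsDifficulty(result common.Hash, difficulty *big.Int) bool {
	// target shrinks as difficulty grows; a nonce is valid when result <= target.
	target := new(big.Int).Div(maxUint256, difficulty)
	return result.Big().Cmp(target) <= 0
}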
|
||||
|
||||
func h256ToHash(in C.ethash_h256_t) common.Hash {
|
||||
@@ -173,16 +179,49 @@ func hashToH256(in common.Hash) C.ethash_h256_t {
|
||||
func (l *Light) getCache(blockNum uint64) *cache {
|
||||
var c *cache
|
||||
epoch := blockNum / epochLength
|
||||
// Update or reuse the last cache.
|
||||
|
||||
// If we have a PoW for that epoch, use that
|
||||
l.mu.Lock()
|
||||
if l.current != nil && l.current.epoch == epoch {
|
||||
c = l.current
|
||||
} else {
|
||||
c = &cache{epoch: epoch, test: l.test}
|
||||
l.current = c
|
||||
if l.caches == nil {
|
||||
l.caches = make(map[uint64]*cache)
|
||||
}
|
||||
if l.NumCaches == 0 {
|
||||
l.NumCaches = 3
|
||||
}
|
||||
c = l.caches[epoch]
|
||||
if c == nil {
|
||||
// No cached DAG, evict the oldest if the cache limit was reached
|
||||
if len(l.caches) >= l.NumCaches {
|
||||
var evict *cache
|
||||
for _, cache := range l.caches {
|
||||
if evict == nil || evict.used.After(cache.used) {
|
||||
evict = cache
|
||||
}
|
||||
}
|
||||
glog.V(logger.Debug).Infof("Evicting DAG for epoch %d in favour of epoch %d", evict.epoch, epoch)
|
||||
delete(l.caches, evict.epoch)
|
||||
}
|
||||
// If we have the new DAG pre-generated, use that, otherwise create a new one
|
||||
if l.future != nil && l.future.epoch == epoch {
|
||||
glog.V(logger.Debug).Infof("Using pre-generated DAG for epoch %d", epoch)
|
||||
c, l.future = l.future, nil
|
||||
} else {
|
||||
glog.V(logger.Debug).Infof("No pre-generated DAG available, creating new for epoch %d", epoch)
|
||||
c = &cache{epoch: epoch, test: l.test}
|
||||
}
|
||||
l.caches[epoch] = c
|
||||
|
||||
// If we just used up the future cache, or need a refresh, regenerate
|
||||
if l.future == nil || l.future.epoch <= epoch {
|
||||
glog.V(logger.Debug).Infof("Pre-generating DAG for epoch %d", epoch+1)
|
||||
l.future = &cache{epoch: epoch + 1, test: l.test}
|
||||
go l.future.generate()
|
||||
}
|
||||
}
|
||||
c.used = time.Now()
|
||||
l.mu.Unlock()
|
||||
// Wait for the cache to finish generating.
|
||||
|
||||
// Wait for generation to finish and return the cache
|
||||
c.generate()
|
||||
return c
|
||||
}
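A hedged walk-through of the policy implemented above, assuming the default NumCaches of 3 and ethash's 30000-block epoch length; the helper is hypothetical and purely illustrative, since every getCache call generates a real verification cache.

func cachePolicyDemo(l *Light) {
	for _, blockNum := range []uint64{1, 30001, 60001, 90001} {
		// Each call returns the cache for epoch blockNum/epochLength, evicting
		// the least recently used one once more than NumCaches epochs are touched.
		_ = l.getCache(blockNum)
	}
	// At this point l.caches holds epochs 1, 2 and 3 (epoch 0 was evicted) and
	// l.future already holds the pre-generated cache for epoch 4.
}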
|
||||
@@ -362,9 +401,13 @@ type Ethash struct {
|
||||
}
|
||||
|
||||
// New creates an instance of the proof of work.
|
||||
// A single instance of Light is shared across all instances
|
||||
// created with New.
|
||||
func New() *Ethash {
|
||||
return &Ethash{new(Light), &Full{turbo: true}}
|
||||
}
|
||||
|
||||
// NewShared creates an instance of the proof of work, where a single instance
|
||||
// of the Light cache is shared across all instances created with NewShared.
|
||||
func NewShared() *Ethash {
|
||||
return &Ethash{sharedLight, &Full{turbo: true}}
|
||||
}
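A hedged sketch (hypothetical in-package helper) of the distinction the two constructors document: every NewShared instance reuses the package-level sharedLight, so the verification caches are built only once, while each New call owns a fresh Light.

func sharedLightDemo() (shared, independent bool) {
	a, b, c := NewShared(), NewShared(), New()
	shared = a.Light == b.Light      // true: both point at sharedLight
	independent = a.Light != c.Light // true: c carries its own Light
	return shared, independent
}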
|
||||
|
||||
|
31  Godeps/_workspace/src/github.com/ethereum/ethash/ethash_opencl.go  generated  vendored
@@ -138,7 +138,7 @@ func PrintDevices() {
|
||||
|
||||
platforms, err := cl.GetPlatforms()
|
||||
if err != nil {
|
||||
fmt.Println("Plaform error (check your OpenCL installation): %v", err)
|
||||
fmt.Println("Plaform error (check your OpenCL installation):", err)
|
||||
return
|
||||
}
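Every hunk in this file fixes the same pair of formatting slips: fmt.Println does not interpret format verbs, so the old call above printed a literal "%v", while fmt.Errorf does interpret them, so passing an error without a verb tacked it on as a "%!(EXTRA …)" suffix. A hedged, stand-alone reproduction with illustrative message strings:

package main

import (
	"errors"
	"fmt"
)

func main() {
	err := errors.New("boom")

	// fmt.Println is not a formatting call: the first line prints a literal "%v".
	fmt.Println("OpenCL platform error: %v", err)
	fmt.Println("OpenCL platform error:", err)

	// fmt.Errorf is: without a verb the error is appended as
	// "%!(EXTRA *errors.errorString=boom)"; with %v it reads as intended.
	fmt.Println(fmt.Errorf("program build err:", err))
	fmt.Println(fmt.Errorf("program build err: %v", err))
}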
|
||||
|
||||
@@ -267,13 +267,13 @@ func initCLDevice(deviceId int, device *cl.Device, c *OpenCLMiner) error {
|
||||
|
||||
context, err := cl.CreateContext([]*cl.Device{device})
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed creating context:", err)
|
||||
return fmt.Errorf("failed creating context: %v", err)
|
||||
}
|
||||
|
||||
// TODO: test running with CL_QUEUE_PROFILING_ENABLE for profiling?
|
||||
queue, err := context.CreateCommandQueue(device, 0)
|
||||
if err != nil {
|
||||
return fmt.Errorf("command queue err:", err)
|
||||
return fmt.Errorf("command queue err: %v", err)
|
||||
}
|
||||
|
||||
// See [4] section 3.2 and [3] "clBuildProgram".
|
||||
@@ -287,7 +287,7 @@ func initCLDevice(deviceId int, device *cl.Device, c *OpenCLMiner) error {
|
||||
|
||||
program, err := context.CreateProgramWithSource([]string{kernelCode})
|
||||
if err != nil {
|
||||
return fmt.Errorf("program err:", err)
|
||||
return fmt.Errorf("program err: %v", err)
|
||||
}
|
||||
|
||||
/* if using AMD OpenCL impl, you can set this to debug on x86 CPU device.
|
||||
@@ -303,7 +303,7 @@ func initCLDevice(deviceId int, device *cl.Device, c *OpenCLMiner) error {
|
||||
buildOpts := ""
|
||||
err = program.BuildProgram([]*cl.Device{device}, buildOpts)
|
||||
if err != nil {
|
||||
return fmt.Errorf("program build err:", err)
|
||||
return fmt.Errorf("program build err: %v", err)
|
||||
}
|
||||
|
||||
var searchKernelName, hashKernelName string
|
||||
@@ -313,7 +313,7 @@ func initCLDevice(deviceId int, device *cl.Device, c *OpenCLMiner) error {
|
||||
searchKernel, err := program.CreateKernel(searchKernelName)
|
||||
hashKernel, err := program.CreateKernel(hashKernelName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("kernel err:", err)
|
||||
return fmt.Errorf("kernel err: %v", err)
|
||||
}
|
||||
|
||||
// TODO: when this DAG size appears, patch the Go bindings
|
||||
@@ -328,28 +328,28 @@ func initCLDevice(deviceId int, device *cl.Device, c *OpenCLMiner) error {
|
||||
dagBuf := *(new(*cl.MemObject))
|
||||
dagBuf, err = context.CreateEmptyBuffer(cl.MemReadOnly, int(c.dagSize))
|
||||
if err != nil {
|
||||
return fmt.Errorf("allocating dag buf failed: ", err)
|
||||
return fmt.Errorf("allocating dag buf failed: %v", err)
|
||||
}
|
||||
|
||||
// write DAG to device mem
|
||||
dagPtr := unsafe.Pointer(c.ethash.Full.current.ptr.data)
|
||||
_, err = queue.EnqueueWriteBuffer(dagBuf, true, 0, int(c.dagSize), dagPtr, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("writing to dag buf failed: ", err)
|
||||
return fmt.Errorf("writing to dag buf failed: %v", err)
|
||||
}
|
||||
|
||||
searchBuffers := make([]*cl.MemObject, searchBufSize)
|
||||
for i := 0; i < searchBufSize; i++ {
|
||||
searchBuff, err := context.CreateEmptyBuffer(cl.MemWriteOnly, (1+maxSearchResults)*SIZEOF_UINT32)
|
||||
if err != nil {
|
||||
return fmt.Errorf("search buffer err:", err)
|
||||
return fmt.Errorf("search buffer err: %v", err)
|
||||
}
|
||||
searchBuffers[i] = searchBuff
|
||||
}
|
||||
|
||||
headerBuf, err := context.CreateEmptyBuffer(cl.MemReadOnly, 32)
|
||||
if err != nil {
|
||||
return fmt.Errorf("header buffer err:", err)
|
||||
return fmt.Errorf("header buffer err: %v", err)
|
||||
}
|
||||
|
||||
// Unique, random nonces are crucial for mining efficiency.
|
||||
@@ -556,13 +556,13 @@ func (c *OpenCLMiner) Search(block pow.Block, stop <-chan struct{}, index int) (
|
||||
upperNonce := uint64(binary.LittleEndian.Uint32(results[lo:hi]))
|
||||
checkNonce = p.startNonce + upperNonce
|
||||
if checkNonce != 0 {
|
||||
cn := C.uint64_t(checkNonce)
|
||||
ds := C.uint64_t(c.dagSize)
|
||||
// We verify that the nonce is indeed a solution by
|
||||
// executing the Ethash verification function (on the CPU).
|
||||
ret := C.ethash_light_compute_internal(c.ethash.Light.current.ptr, ds, hashToH256(headerHash), cn)
|
||||
cache := c.ethash.Light.getCache(block.NumberU64())
|
||||
ok, mixDigest, result := cache.compute(c.dagSize, headerHash, checkNonce)
|
||||
|
||||
// TODO: return result first
|
||||
if ret.success && h256ToHash(ret.result).Big().Cmp(target256) <= 0 {
|
||||
if ok && result.Big().Cmp(target256) <= 0 {
|
||||
_, err = d.queue.EnqueueUnmapMemObject(d.searchBuffers[p.bufIndex], cres, nil)
|
||||
if err != nil {
|
||||
fmt.Println("Error in Search clEnqueueUnmapMemObject: ", err)
|
||||
@@ -573,9 +573,8 @@ func (c *OpenCLMiner) Search(block pow.Block, stop <-chan struct{}, index int) (
|
||||
fmt.Println("Error in Search WaitForEvents: ", err)
|
||||
}
|
||||
}
|
||||
return checkNonce, C.GoBytes(unsafe.Pointer(&ret.mix_hash), C.int(32))
|
||||
return checkNonce, mixDigest.Bytes()
|
||||
}
|
||||
|
||||
_, err := d.queue.EnqueueWriteBuffer(d.searchBuffers[p.bufIndex], false, 0, 4, unsafe.Pointer(&zero), nil)
|
||||
if err != nil {
|
||||
fmt.Println("Error in Search cl: EnqueueWriteBuffer", err)
|
||||
|
221  Godeps/_workspace/src/github.com/ethereum/ethash/ethash_test.go  generated  vendored
@@ -1,221 +0,0 @@
|
||||
// Copyright 2015 The go-ethereum Authors
|
||||
// Copyright 2015 Lefteris Karapetsas <lefteris@refu.co>
|
||||
// This file is part of the go-ethereum library.
|
||||
//
|
||||
// The go-ethereum library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Lesser General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// The go-ethereum library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Lesser General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Lesser General Public License
|
||||
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
package ethash
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"encoding/hex"
|
||||
"log"
|
||||
"math/big"
|
||||
"os"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/common"
|
||||
"github.com/ethereum/go-ethereum/crypto"
|
||||
)
|
||||
|
||||
func init() {
|
||||
// glog.SetV(6)
|
||||
// glog.SetToStderr(true)
|
||||
}
|
||||
|
||||
type testBlock struct {
|
||||
difficulty *big.Int
|
||||
hashNoNonce common.Hash
|
||||
nonce uint64
|
||||
mixDigest common.Hash
|
||||
number uint64
|
||||
}
|
||||
|
||||
func (b *testBlock) Difficulty() *big.Int { return b.difficulty }
|
||||
func (b *testBlock) HashNoNonce() common.Hash { return b.hashNoNonce }
|
||||
func (b *testBlock) Nonce() uint64 { return b.nonce }
|
||||
func (b *testBlock) MixDigest() common.Hash { return b.mixDigest }
|
||||
func (b *testBlock) NumberU64() uint64 { return b.number }
|
||||
|
||||
var validBlocks = []*testBlock{
|
||||
// from proof of concept nine testnet, epoch 0
|
||||
{
|
||||
number: 22,
|
||||
hashNoNonce: common.HexToHash("372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d"),
|
||||
difficulty: big.NewInt(132416),
|
||||
nonce: 0x495732e0ed7a801c,
|
||||
mixDigest: common.HexToHash("2f74cdeb198af0b9abe65d22d372e22fb2d474371774a9583c1cc427a07939f5"),
|
||||
},
|
||||
// from proof of concept nine testnet, epoch 1
|
||||
{
|
||||
number: 30001,
|
||||
hashNoNonce: common.HexToHash("7e44356ee3441623bc72a683fd3708fdf75e971bbe294f33e539eedad4b92b34"),
|
||||
difficulty: big.NewInt(1532671),
|
||||
nonce: 0x318df1c8adef7e5e,
|
||||
mixDigest: common.HexToHash("144b180aad09ae3c81fb07be92c8e6351b5646dda80e6844ae1b697e55ddde84"),
|
||||
},
|
||||
// from proof of concept nine testnet, epoch 2
|
||||
{
|
||||
number: 60000,
|
||||
hashNoNonce: common.HexToHash("5fc898f16035bf5ac9c6d9077ae1e3d5fc1ecc3c9fd5bee8bb00e810fdacbaa0"),
|
||||
difficulty: big.NewInt(2467358),
|
||||
nonce: 0x50377003e5d830ca,
|
||||
mixDigest: common.HexToHash("ab546a5b73c452ae86dadd36f0ed83a6745226717d3798832d1b20b489e82063"),
|
||||
},
|
||||
}
|
||||
|
||||
var invalidZeroDiffBlock = testBlock{
|
||||
number: 61440000,
|
||||
hashNoNonce: crypto.Sha3Hash([]byte("foo")),
|
||||
difficulty: big.NewInt(0),
|
||||
nonce: 0xcafebabec00000fe,
|
||||
mixDigest: crypto.Sha3Hash([]byte("bar")),
|
||||
}
|
||||
|
||||
func TestEthashVerifyValid(t *testing.T) {
|
||||
eth := New()
|
||||
for i, block := range validBlocks {
|
||||
if !eth.Verify(block) {
|
||||
t.Errorf("block %d (%x) did not validate.", i, block.hashNoNonce[:6])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEthashVerifyInvalid(t *testing.T) {
|
||||
eth := New()
|
||||
if eth.Verify(&invalidZeroDiffBlock) {
|
||||
t.Errorf("should not validate - we just ensure it does not panic on this block")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEthashConcurrentVerify(t *testing.T) {
|
||||
eth, err := NewForTesting()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.RemoveAll(eth.Full.Dir)
|
||||
|
||||
block := &testBlock{difficulty: big.NewInt(10)}
|
||||
nonce, md := eth.Search(block, nil, 0)
|
||||
block.nonce = nonce
|
||||
block.mixDigest = common.BytesToHash(md)
|
||||
|
||||
// Verify the block concurrently to check for data races.
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(100)
|
||||
for i := 0; i < 100; i++ {
|
||||
go func() {
|
||||
if !eth.Verify(block) {
|
||||
t.Error("Block could not be verified")
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
||||
|
||||
func TestEthashConcurrentSearch(t *testing.T) {
|
||||
eth, err := NewForTesting()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
eth.Turbo(true)
|
||||
defer os.RemoveAll(eth.Full.Dir)
|
||||
|
||||
type searchRes struct {
|
||||
n uint64
|
||||
md []byte
|
||||
}
|
||||
|
||||
var (
|
||||
block = &testBlock{difficulty: big.NewInt(35000)}
|
||||
nsearch = 10
|
||||
wg = new(sync.WaitGroup)
|
||||
found = make(chan searchRes)
|
||||
stop = make(chan struct{})
|
||||
)
|
||||
rand.Read(block.hashNoNonce[:])
|
||||
wg.Add(nsearch)
|
||||
// launch n searches concurrently.
|
||||
for i := 0; i < nsearch; i++ {
|
||||
go func() {
|
||||
nonce, md := eth.Search(block, stop, 0)
|
||||
select {
|
||||
case found <- searchRes{n: nonce, md: md}:
|
||||
case <-stop:
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
}
|
||||
|
||||
// wait for one of them to find the nonce
|
||||
res := <-found
|
||||
// stop the others
|
||||
close(stop)
|
||||
wg.Wait()
|
||||
|
||||
block.nonce = res.n
|
||||
block.mixDigest = common.BytesToHash(res.md)
|
||||
if !eth.Verify(block) {
|
||||
t.Error("Block could not be verified")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEthashSearchAcrossEpoch(t *testing.T) {
|
||||
eth, err := NewForTesting()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.RemoveAll(eth.Full.Dir)
|
||||
|
||||
for i := epochLength - 40; i < epochLength+40; i++ {
|
||||
block := &testBlock{number: i, difficulty: big.NewInt(90)}
|
||||
rand.Read(block.hashNoNonce[:])
|
||||
nonce, md := eth.Search(block, nil, 0)
|
||||
block.nonce = nonce
|
||||
block.mixDigest = common.BytesToHash(md)
|
||||
if !eth.Verify(block) {
|
||||
t.Fatalf("Block could not be verified")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetSeedHash(t *testing.T) {
|
||||
seed0, err := GetSeedHash(0)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to get seedHash for block 0: %v", err)
|
||||
}
|
||||
if bytes.Compare(seed0, make([]byte, 32)) != 0 {
|
||||
log.Printf("seedHash for block 0 should be 0s, was: %v\n", seed0)
|
||||
}
|
||||
seed1, err := GetSeedHash(30000)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
// From python:
|
||||
// > from pyethash import get_seedhash
|
||||
// > get_seedhash(30000)
|
||||
expectedSeed1, err := hex.DecodeString("290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if bytes.Compare(seed1, expectedSeed1) != 0 {
|
||||
log.Printf("seedHash for block 1 should be: %v,\nactual value: %v\n", expectedSeed1, seed1)
|
||||
}
|
||||
|
||||
}
|
Godeps/_workspace/src/github.com/ethereum/ethash/js/ethash.js (generated, vendored, 190 lines deleted)
@ -1,190 +0,0 @@
|
||||
// ethash.js
|
||||
// Tim Hughes <tim@twistedfury.com>
|
||||
// Revision 19
|
||||
|
||||
/*jslint node: true, shadow:true */
|
||||
"use strict";
|
||||
|
||||
var Keccak = require('./keccak');
|
||||
var util = require('./util');
|
||||
|
||||
// 32-bit unsigned modulo
|
||||
function mod32(x, n)
|
||||
{
|
||||
return (x>>>0) % (n>>>0);
|
||||
}
|
||||
|
||||
function fnv(x, y)
|
||||
{
|
||||
// js integer multiply by 0x01000193 will lose precision
|
||||
return ((x*0x01000000 | 0) + (x*0x193 | 0)) ^ y;
|
||||
}
|
||||
|
||||
function computeCache(params, seedWords)
|
||||
{
|
||||
var cache = new Uint32Array(params.cacheSize >> 2);
|
||||
var cacheNodeCount = params.cacheSize >> 6;
|
||||
|
||||
// Initialize cache
|
||||
var keccak = new Keccak();
|
||||
keccak.digestWords(cache, 0, 16, seedWords, 0, seedWords.length);
|
||||
for (var n = 1; n < cacheNodeCount; ++n)
|
||||
{
|
||||
keccak.digestWords(cache, n<<4, 16, cache, (n-1)<<4, 16);
|
||||
}
|
||||
|
||||
var tmp = new Uint32Array(16);
|
||||
|
||||
// Do randmemohash passes
|
||||
for (var r = 0; r < params.cacheRounds; ++r)
|
||||
{
|
||||
for (var n = 0; n < cacheNodeCount; ++n)
|
||||
{
|
||||
var p0 = mod32(n + cacheNodeCount - 1, cacheNodeCount) << 4;
|
||||
var p1 = mod32(cache[n<<4|0], cacheNodeCount) << 4;
|
||||
|
||||
for (var w = 0; w < 16; w=(w+1)|0)
|
||||
{
|
||||
tmp[w] = cache[p0 | w] ^ cache[p1 | w];
|
||||
}
|
||||
|
||||
keccak.digestWords(cache, n<<4, 16, tmp, 0, tmp.length);
|
||||
}
|
||||
}
|
||||
return cache;
|
||||
}
|
||||
|
||||
function computeDagNode(o_node, params, cache, keccak, nodeIndex)
|
||||
{
|
||||
var cacheNodeCount = params.cacheSize >> 6;
|
||||
var dagParents = params.dagParents;
|
||||
|
||||
var c = (nodeIndex % cacheNodeCount) << 4;
|
||||
var mix = o_node;
|
||||
for (var w = 0; w < 16; ++w)
|
||||
{
|
||||
mix[w] = cache[c|w];
|
||||
}
|
||||
mix[0] ^= nodeIndex;
|
||||
keccak.digestWords(mix, 0, 16, mix, 0, 16);
|
||||
|
||||
for (var p = 0; p < dagParents; ++p)
|
||||
{
|
||||
// compute cache node (word) index
|
||||
c = mod32(fnv(nodeIndex ^ p, mix[p&15]), cacheNodeCount) << 4;
|
||||
|
||||
for (var w = 0; w < 16; ++w)
|
||||
{
|
||||
mix[w] = fnv(mix[w], cache[c|w]);
|
||||
}
|
||||
}
|
||||
|
||||
keccak.digestWords(mix, 0, 16, mix, 0, 16);
|
||||
}
|
||||
|
||||
function computeHashInner(mix, params, cache, keccak, tempNode)
|
||||
{
|
||||
var mixParents = params.mixParents|0;
|
||||
var mixWordCount = params.mixSize >> 2;
|
||||
var mixNodeCount = mixWordCount >> 4;
|
||||
var dagPageCount = (params.dagSize / params.mixSize) >> 0;
|
||||
|
||||
// grab initial first word
|
||||
var s0 = mix[0];
|
||||
|
||||
// initialise mix from initial 64 bytes
|
||||
for (var w = 16; w < mixWordCount; ++w)
|
||||
{
|
||||
mix[w] = mix[w & 15];
|
||||
}
|
||||
|
||||
for (var a = 0; a < mixParents; ++a)
|
||||
{
|
||||
var p = mod32(fnv(s0 ^ a, mix[a & (mixWordCount-1)]), dagPageCount);
|
||||
var d = (p * mixNodeCount)|0;
|
||||
|
||||
for (var n = 0, w = 0; n < mixNodeCount; ++n, w += 16)
|
||||
{
|
||||
computeDagNode(tempNode, params, cache, keccak, (d + n)|0);
|
||||
|
||||
for (var v = 0; v < 16; ++v)
|
||||
{
|
||||
mix[w|v] = fnv(mix[w|v], tempNode[v]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function convertSeed(seed)
|
||||
{
|
||||
// todo, reconcile with spec, byte ordering?
|
||||
// todo, big-endian conversion
|
||||
var newSeed = util.toWords(seed);
|
||||
if (newSeed === null)
|
||||
throw Error("Invalid seed '" + seed + "'");
|
||||
return newSeed;
|
||||
}
|
||||
|
||||
exports.defaultParams = function()
|
||||
{
|
||||
return {
|
||||
cacheSize: 1048384,
|
||||
cacheRounds: 3,
|
||||
dagSize: 1073739904,
|
||||
dagParents: 256,
|
||||
mixSize: 128,
|
||||
mixParents: 64,
|
||||
};
|
||||
};
|
||||
|
||||
exports.Ethash = function(params, seed)
|
||||
{
|
||||
// precompute cache and related values
|
||||
seed = convertSeed(seed);
|
||||
var cache = computeCache(params, seed);
|
||||
|
||||
// preallocate buffers/etc
|
||||
var initBuf = new ArrayBuffer(96);
|
||||
var initBytes = new Uint8Array(initBuf);
|
||||
var initWords = new Uint32Array(initBuf);
|
||||
var mixWords = new Uint32Array(params.mixSize / 4);
|
||||
var tempNode = new Uint32Array(16);
|
||||
var keccak = new Keccak();
|
||||
|
||||
var retWords = new Uint32Array(8);
|
||||
var retBytes = new Uint8Array(retWords.buffer); // supposedly read-only
|
||||
|
||||
this.hash = function(header, nonce)
|
||||
{
|
||||
// compute initial hash
|
||||
initBytes.set(header, 0);
|
||||
initBytes.set(nonce, 32);
|
||||
keccak.digestWords(initWords, 0, 16, initWords, 0, 8 + nonce.length/4);
|
||||
|
||||
// compute mix
|
||||
for (var i = 0; i != 16; ++i)
|
||||
{
|
||||
mixWords[i] = initWords[i];
|
||||
}
|
||||
computeHashInner(mixWords, params, cache, keccak, tempNode);
|
||||
|
||||
// compress mix and append to initWords
|
||||
for (var i = 0; i != mixWords.length; i += 4)
|
||||
{
|
||||
initWords[16 + i/4] = fnv(fnv(fnv(mixWords[i], mixWords[i+1]), mixWords[i+2]), mixWords[i+3]);
|
||||
}
|
||||
|
||||
// final Keccak hashes
|
||||
keccak.digestWords(retWords, 0, 8, initWords, 0, 24); // Keccak-256(s + cmix)
|
||||
return retBytes;
|
||||
};
|
||||
|
||||
this.cacheDigest = function()
|
||||
{
|
||||
return keccak.digest(32, new Uint8Array(cache.buffer));
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
Godeps/_workspace/src/github.com/ethereum/ethash/js/keccak.js (generated, vendored, 404 lines deleted)
@ -1,404 +0,0 @@
|
||||
// keccak.js
|
||||
// Tim Hughes <tim@twistedfury.com>
|
||||
// derived from Markku-Juhani O. Saarinen's C code (http://keccak.noekeon.org/readable_code.html)
|
||||
|
||||
/*jslint node: true, shadow:true */
|
||||
"use strict";
|
||||
|
||||
var Keccak_f1600_RC = new Uint32Array([
|
||||
0x00000001, 0x00000000,
|
||||
0x00008082, 0x00000000,
|
||||
0x0000808a, 0x80000000,
|
||||
0x80008000, 0x80000000,
|
||||
0x0000808b, 0x00000000,
|
||||
0x80000001, 0x00000000,
|
||||
0x80008081, 0x80000000,
|
||||
0x00008009, 0x80000000,
|
||||
0x0000008a, 0x00000000,
|
||||
0x00000088, 0x00000000,
|
||||
0x80008009, 0x00000000,
|
||||
0x8000000a, 0x00000000,
|
||||
0x8000808b, 0x00000000,
|
||||
0x0000008b, 0x80000000,
|
||||
0x00008089, 0x80000000,
|
||||
0x00008003, 0x80000000,
|
||||
0x00008002, 0x80000000,
|
||||
0x00000080, 0x80000000,
|
||||
0x0000800a, 0x00000000,
|
||||
0x8000000a, 0x80000000,
|
||||
0x80008081, 0x80000000,
|
||||
0x00008080, 0x80000000,
|
||||
0x80000001, 0x00000000,
|
||||
0x80008008, 0x80000000
|
||||
]);
|
||||
|
||||
function keccak_f1600(outState, outOffset, outSize, inState)
|
||||
{
|
||||
// todo, handle big endian loads
|
||||
var a00l = inState[0]|0;
|
||||
var a00h = inState[1]|0;
|
||||
var a01l = inState[2]|0;
|
||||
var a01h = inState[3]|0;
|
||||
var a02l = inState[4]|0;
|
||||
var a02h = inState[5]|0;
|
||||
var a03l = inState[6]|0;
|
||||
var a03h = inState[7]|0;
|
||||
var a04l = inState[8]|0;
|
||||
var a04h = inState[9]|0;
|
||||
var a05l = inState[10]|0;
|
||||
var a05h = inState[11]|0;
|
||||
var a06l = inState[12]|0;
|
||||
var a06h = inState[13]|0;
|
||||
var a07l = inState[14]|0;
|
||||
var a07h = inState[15]|0;
|
||||
var a08l = inState[16]|0;
|
||||
var a08h = inState[17]|0;
|
||||
var a09l = inState[18]|0;
|
||||
var a09h = inState[19]|0;
|
||||
var a10l = inState[20]|0;
|
||||
var a10h = inState[21]|0;
|
||||
var a11l = inState[22]|0;
|
||||
var a11h = inState[23]|0;
|
||||
var a12l = inState[24]|0;
|
||||
var a12h = inState[25]|0;
|
||||
var a13l = inState[26]|0;
|
||||
var a13h = inState[27]|0;
|
||||
var a14l = inState[28]|0;
|
||||
var a14h = inState[29]|0;
|
||||
var a15l = inState[30]|0;
|
||||
var a15h = inState[31]|0;
|
||||
var a16l = inState[32]|0;
|
||||
var a16h = inState[33]|0;
|
||||
var a17l = inState[34]|0;
|
||||
var a17h = inState[35]|0;
|
||||
var a18l = inState[36]|0;
|
||||
var a18h = inState[37]|0;
|
||||
var a19l = inState[38]|0;
|
||||
var a19h = inState[39]|0;
|
||||
var a20l = inState[40]|0;
|
||||
var a20h = inState[41]|0;
|
||||
var a21l = inState[42]|0;
|
||||
var a21h = inState[43]|0;
|
||||
var a22l = inState[44]|0;
|
||||
var a22h = inState[45]|0;
|
||||
var a23l = inState[46]|0;
|
||||
var a23h = inState[47]|0;
|
||||
var a24l = inState[48]|0;
|
||||
var a24h = inState[49]|0;
|
||||
var b00l, b00h, b01l, b01h, b02l, b02h, b03l, b03h, b04l, b04h;
|
||||
var b05l, b05h, b06l, b06h, b07l, b07h, b08l, b08h, b09l, b09h;
|
||||
var b10l, b10h, b11l, b11h, b12l, b12h, b13l, b13h, b14l, b14h;
|
||||
var b15l, b15h, b16l, b16h, b17l, b17h, b18l, b18h, b19l, b19h;
|
||||
var b20l, b20h, b21l, b21h, b22l, b22h, b23l, b23h, b24l, b24h;
|
||||
var tl, nl;
|
||||
var th, nh;
|
||||
|
||||
for (var r = 0; r < 48; r = (r+2)|0)
|
||||
{
|
||||
// Theta
|
||||
b00l = a00l ^ a05l ^ a10l ^ a15l ^ a20l;
|
||||
b00h = a00h ^ a05h ^ a10h ^ a15h ^ a20h;
|
||||
b01l = a01l ^ a06l ^ a11l ^ a16l ^ a21l;
|
||||
b01h = a01h ^ a06h ^ a11h ^ a16h ^ a21h;
|
||||
b02l = a02l ^ a07l ^ a12l ^ a17l ^ a22l;
|
||||
b02h = a02h ^ a07h ^ a12h ^ a17h ^ a22h;
|
||||
b03l = a03l ^ a08l ^ a13l ^ a18l ^ a23l;
|
||||
b03h = a03h ^ a08h ^ a13h ^ a18h ^ a23h;
|
||||
b04l = a04l ^ a09l ^ a14l ^ a19l ^ a24l;
|
||||
b04h = a04h ^ a09h ^ a14h ^ a19h ^ a24h;
|
||||
tl = b04l ^ (b01l << 1 | b01h >>> 31);
|
||||
th = b04h ^ (b01h << 1 | b01l >>> 31);
|
||||
a00l ^= tl;
|
||||
a00h ^= th;
|
||||
a05l ^= tl;
|
||||
a05h ^= th;
|
||||
a10l ^= tl;
|
||||
a10h ^= th;
|
||||
a15l ^= tl;
|
||||
a15h ^= th;
|
||||
a20l ^= tl;
|
||||
a20h ^= th;
|
||||
tl = b00l ^ (b02l << 1 | b02h >>> 31);
|
||||
th = b00h ^ (b02h << 1 | b02l >>> 31);
|
||||
a01l ^= tl;
|
||||
a01h ^= th;
|
||||
a06l ^= tl;
|
||||
a06h ^= th;
|
||||
a11l ^= tl;
|
||||
a11h ^= th;
|
||||
a16l ^= tl;
|
||||
a16h ^= th;
|
||||
a21l ^= tl;
|
||||
a21h ^= th;
|
||||
tl = b01l ^ (b03l << 1 | b03h >>> 31);
|
||||
th = b01h ^ (b03h << 1 | b03l >>> 31);
|
||||
a02l ^= tl;
|
||||
a02h ^= th;
|
||||
a07l ^= tl;
|
||||
a07h ^= th;
|
||||
a12l ^= tl;
|
||||
a12h ^= th;
|
||||
a17l ^= tl;
|
||||
a17h ^= th;
|
||||
a22l ^= tl;
|
||||
a22h ^= th;
|
||||
tl = b02l ^ (b04l << 1 | b04h >>> 31);
|
||||
th = b02h ^ (b04h << 1 | b04l >>> 31);
|
||||
a03l ^= tl;
|
||||
a03h ^= th;
|
||||
a08l ^= tl;
|
||||
a08h ^= th;
|
||||
a13l ^= tl;
|
||||
a13h ^= th;
|
||||
a18l ^= tl;
|
||||
a18h ^= th;
|
||||
a23l ^= tl;
|
||||
a23h ^= th;
|
||||
tl = b03l ^ (b00l << 1 | b00h >>> 31);
|
||||
th = b03h ^ (b00h << 1 | b00l >>> 31);
|
||||
a04l ^= tl;
|
||||
a04h ^= th;
|
||||
a09l ^= tl;
|
||||
a09h ^= th;
|
||||
a14l ^= tl;
|
||||
a14h ^= th;
|
||||
a19l ^= tl;
|
||||
a19h ^= th;
|
||||
a24l ^= tl;
|
||||
a24h ^= th;
|
||||
|
||||
// Rho Pi
|
||||
b00l = a00l;
|
||||
b00h = a00h;
|
||||
b10l = a01l << 1 | a01h >>> 31;
|
||||
b10h = a01h << 1 | a01l >>> 31;
|
||||
b07l = a10l << 3 | a10h >>> 29;
|
||||
b07h = a10h << 3 | a10l >>> 29;
|
||||
b11l = a07l << 6 | a07h >>> 26;
|
||||
b11h = a07h << 6 | a07l >>> 26;
|
||||
b17l = a11l << 10 | a11h >>> 22;
|
||||
b17h = a11h << 10 | a11l >>> 22;
|
||||
b18l = a17l << 15 | a17h >>> 17;
|
||||
b18h = a17h << 15 | a17l >>> 17;
|
||||
b03l = a18l << 21 | a18h >>> 11;
|
||||
b03h = a18h << 21 | a18l >>> 11;
|
||||
b05l = a03l << 28 | a03h >>> 4;
|
||||
b05h = a03h << 28 | a03l >>> 4;
|
||||
b16l = a05h << 4 | a05l >>> 28;
|
||||
b16h = a05l << 4 | a05h >>> 28;
|
||||
b08l = a16h << 13 | a16l >>> 19;
|
||||
b08h = a16l << 13 | a16h >>> 19;
|
||||
b21l = a08h << 23 | a08l >>> 9;
|
||||
b21h = a08l << 23 | a08h >>> 9;
|
||||
b24l = a21l << 2 | a21h >>> 30;
|
||||
b24h = a21h << 2 | a21l >>> 30;
|
||||
b04l = a24l << 14 | a24h >>> 18;
|
||||
b04h = a24h << 14 | a24l >>> 18;
|
||||
b15l = a04l << 27 | a04h >>> 5;
|
||||
b15h = a04h << 27 | a04l >>> 5;
|
||||
b23l = a15h << 9 | a15l >>> 23;
|
||||
b23h = a15l << 9 | a15h >>> 23;
|
||||
b19l = a23h << 24 | a23l >>> 8;
|
||||
b19h = a23l << 24 | a23h >>> 8;
|
||||
b13l = a19l << 8 | a19h >>> 24;
|
||||
b13h = a19h << 8 | a19l >>> 24;
|
||||
b12l = a13l << 25 | a13h >>> 7;
|
||||
b12h = a13h << 25 | a13l >>> 7;
|
||||
b02l = a12h << 11 | a12l >>> 21;
|
||||
b02h = a12l << 11 | a12h >>> 21;
|
||||
b20l = a02h << 30 | a02l >>> 2;
|
||||
b20h = a02l << 30 | a02h >>> 2;
|
||||
b14l = a20l << 18 | a20h >>> 14;
|
||||
b14h = a20h << 18 | a20l >>> 14;
|
||||
b22l = a14h << 7 | a14l >>> 25;
|
||||
b22h = a14l << 7 | a14h >>> 25;
|
||||
b09l = a22h << 29 | a22l >>> 3;
|
||||
b09h = a22l << 29 | a22h >>> 3;
|
||||
b06l = a09l << 20 | a09h >>> 12;
|
||||
b06h = a09h << 20 | a09l >>> 12;
|
||||
b01l = a06h << 12 | a06l >>> 20;
|
||||
b01h = a06l << 12 | a06h >>> 20;
|
||||
|
||||
// Chi
|
||||
a00l = b00l ^ ~b01l & b02l;
|
||||
a00h = b00h ^ ~b01h & b02h;
|
||||
a01l = b01l ^ ~b02l & b03l;
|
||||
a01h = b01h ^ ~b02h & b03h;
|
||||
a02l = b02l ^ ~b03l & b04l;
|
||||
a02h = b02h ^ ~b03h & b04h;
|
||||
a03l = b03l ^ ~b04l & b00l;
|
||||
a03h = b03h ^ ~b04h & b00h;
|
||||
a04l = b04l ^ ~b00l & b01l;
|
||||
a04h = b04h ^ ~b00h & b01h;
|
||||
a05l = b05l ^ ~b06l & b07l;
|
||||
a05h = b05h ^ ~b06h & b07h;
|
||||
a06l = b06l ^ ~b07l & b08l;
|
||||
a06h = b06h ^ ~b07h & b08h;
|
||||
a07l = b07l ^ ~b08l & b09l;
|
||||
a07h = b07h ^ ~b08h & b09h;
|
||||
a08l = b08l ^ ~b09l & b05l;
|
||||
a08h = b08h ^ ~b09h & b05h;
|
||||
a09l = b09l ^ ~b05l & b06l;
|
||||
a09h = b09h ^ ~b05h & b06h;
|
||||
a10l = b10l ^ ~b11l & b12l;
|
||||
a10h = b10h ^ ~b11h & b12h;
|
||||
a11l = b11l ^ ~b12l & b13l;
|
||||
a11h = b11h ^ ~b12h & b13h;
|
||||
a12l = b12l ^ ~b13l & b14l;
|
||||
a12h = b12h ^ ~b13h & b14h;
|
||||
a13l = b13l ^ ~b14l & b10l;
|
||||
a13h = b13h ^ ~b14h & b10h;
|
||||
a14l = b14l ^ ~b10l & b11l;
|
||||
a14h = b14h ^ ~b10h & b11h;
|
||||
a15l = b15l ^ ~b16l & b17l;
|
||||
a15h = b15h ^ ~b16h & b17h;
|
||||
a16l = b16l ^ ~b17l & b18l;
|
||||
a16h = b16h ^ ~b17h & b18h;
|
||||
a17l = b17l ^ ~b18l & b19l;
|
||||
a17h = b17h ^ ~b18h & b19h;
|
||||
a18l = b18l ^ ~b19l & b15l;
|
||||
a18h = b18h ^ ~b19h & b15h;
|
||||
a19l = b19l ^ ~b15l & b16l;
|
||||
a19h = b19h ^ ~b15h & b16h;
|
||||
a20l = b20l ^ ~b21l & b22l;
|
||||
a20h = b20h ^ ~b21h & b22h;
|
||||
a21l = b21l ^ ~b22l & b23l;
|
||||
a21h = b21h ^ ~b22h & b23h;
|
||||
a22l = b22l ^ ~b23l & b24l;
|
||||
a22h = b22h ^ ~b23h & b24h;
|
||||
a23l = b23l ^ ~b24l & b20l;
|
||||
a23h = b23h ^ ~b24h & b20h;
|
||||
a24l = b24l ^ ~b20l & b21l;
|
||||
a24h = b24h ^ ~b20h & b21h;
|
||||
|
||||
// Iota
|
||||
a00l ^= Keccak_f1600_RC[r|0];
|
||||
a00h ^= Keccak_f1600_RC[r|1];
|
||||
}
|
||||
|
||||
// todo, handle big-endian stores
|
||||
outState[outOffset|0] = a00l;
|
||||
outState[outOffset|1] = a00h;
|
||||
outState[outOffset|2] = a01l;
|
||||
outState[outOffset|3] = a01h;
|
||||
outState[outOffset|4] = a02l;
|
||||
outState[outOffset|5] = a02h;
|
||||
outState[outOffset|6] = a03l;
|
||||
outState[outOffset|7] = a03h;
|
||||
if (outSize == 8)
|
||||
return;
|
||||
outState[outOffset|8] = a04l;
|
||||
outState[outOffset|9] = a04h;
|
||||
outState[outOffset|10] = a05l;
|
||||
outState[outOffset|11] = a05h;
|
||||
outState[outOffset|12] = a06l;
|
||||
outState[outOffset|13] = a06h;
|
||||
outState[outOffset|14] = a07l;
|
||||
outState[outOffset|15] = a07h;
|
||||
if (outSize == 16)
|
||||
return;
|
||||
outState[outOffset|16] = a08l;
|
||||
outState[outOffset|17] = a08h;
|
||||
outState[outOffset|18] = a09l;
|
||||
outState[outOffset|19] = a09h;
|
||||
outState[outOffset|20] = a10l;
|
||||
outState[outOffset|21] = a10h;
|
||||
outState[outOffset|22] = a11l;
|
||||
outState[outOffset|23] = a11h;
|
||||
outState[outOffset|24] = a12l;
|
||||
outState[outOffset|25] = a12h;
|
||||
outState[outOffset|26] = a13l;
|
||||
outState[outOffset|27] = a13h;
|
||||
outState[outOffset|28] = a14l;
|
||||
outState[outOffset|29] = a14h;
|
||||
outState[outOffset|30] = a15l;
|
||||
outState[outOffset|31] = a15h;
|
||||
outState[outOffset|32] = a16l;
|
||||
outState[outOffset|33] = a16h;
|
||||
outState[outOffset|34] = a17l;
|
||||
outState[outOffset|35] = a17h;
|
||||
outState[outOffset|36] = a18l;
|
||||
outState[outOffset|37] = a18h;
|
||||
outState[outOffset|38] = a19l;
|
||||
outState[outOffset|39] = a19h;
|
||||
outState[outOffset|40] = a20l;
|
||||
outState[outOffset|41] = a20h;
|
||||
outState[outOffset|42] = a21l;
|
||||
outState[outOffset|43] = a21h;
|
||||
outState[outOffset|44] = a22l;
|
||||
outState[outOffset|45] = a22h;
|
||||
outState[outOffset|46] = a23l;
|
||||
outState[outOffset|47] = a23h;
|
||||
outState[outOffset|48] = a24l;
|
||||
outState[outOffset|49] = a24h;
|
||||
}
|
||||
|
||||
var Keccak = function()
|
||||
{
|
||||
var stateBuf = new ArrayBuffer(200);
|
||||
var stateBytes = new Uint8Array(stateBuf);
|
||||
var stateWords = new Uint32Array(stateBuf);
|
||||
|
||||
this.digest = function(oSize, iBytes)
|
||||
{
|
||||
for (var i = 0; i < 50; ++i)
|
||||
{
|
||||
stateWords[i] = 0;
|
||||
}
|
||||
|
||||
var r = 200 - oSize*2;
|
||||
var iLength = iBytes.length;
|
||||
var iOffset = 0;
|
||||
for ( ; ;)
|
||||
{
|
||||
var len = iLength < r ? iLength : r;
|
||||
for (i = 0; i < len; ++i, ++iOffset)
|
||||
{
|
||||
stateBytes[i] ^= iBytes[iOffset];
|
||||
}
|
||||
|
||||
if (iLength < r)
|
||||
break;
|
||||
iLength -= len;
|
||||
|
||||
keccak_f1600(stateWords, 0, 50, stateWords);
|
||||
}
|
||||
|
||||
stateBytes[iLength] ^= 1;
|
||||
stateBytes[r-1] ^= 0x80;
|
||||
keccak_f1600(stateWords, 0, 50, stateWords);
|
||||
return stateBytes.subarray(0, oSize);
|
||||
};
|
||||
|
||||
this.digestWords = function(oWords, oOffset, oLength, iWords, iOffset, iLength)
|
||||
{
|
||||
for (var i = 0; i < 50; ++i)
|
||||
{
|
||||
stateWords[i] = 0;
|
||||
}
|
||||
|
||||
var r = 50 - oLength*2;
|
||||
for (; ; )
|
||||
{
|
||||
var len = iLength < r ? iLength : r;
|
||||
for (i = 0; i < len; ++i, ++iOffset)
|
||||
{
|
||||
stateWords[i] ^= iWords[iOffset];
|
||||
}
|
||||
|
||||
if (iLength < r)
|
||||
break;
|
||||
iLength -= len;
|
||||
|
||||
keccak_f1600(stateWords, 0, 50, stateWords);
|
||||
}
|
||||
|
||||
stateBytes[iLength<<2] ^= 1;
|
||||
stateBytes[(r<<2) - 1] ^= 0x80;
|
||||
keccak_f1600(oWords, oOffset, oLength, stateWords);
|
||||
};
|
||||
};
|
||||
|
||||
module.exports = Keccak;
|
||||
|
||||
|
Godeps/_workspace/src/github.com/ethereum/ethash/js/makekeccak.js (generated, vendored, 201 lines deleted)
@ -1,201 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
// makekeccak.js
|
||||
// Tim Hughes <tim@twistedfury.com>
|
||||
|
||||
/*jslint node: true, shadow:true */
|
||||
"use strict";
|
||||
|
||||
var Keccak_f1600_Rho = [
|
||||
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
|
||||
27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44
|
||||
];
|
||||
|
||||
var Keccak_f1600_Pi= [
|
||||
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
|
||||
15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1
|
||||
];
|
||||
|
||||
var Keccak_f1600_RC = [
|
||||
0x00000001, 0x00000000,
|
||||
0x00008082, 0x00000000,
|
||||
0x0000808a, 0x80000000,
|
||||
0x80008000, 0x80000000,
|
||||
0x0000808b, 0x00000000,
|
||||
0x80000001, 0x00000000,
|
||||
0x80008081, 0x80000000,
|
||||
0x00008009, 0x80000000,
|
||||
0x0000008a, 0x00000000,
|
||||
0x00000088, 0x00000000,
|
||||
0x80008009, 0x00000000,
|
||||
0x8000000a, 0x00000000,
|
||||
0x8000808b, 0x00000000,
|
||||
0x0000008b, 0x80000000,
|
||||
0x00008089, 0x80000000,
|
||||
0x00008003, 0x80000000,
|
||||
0x00008002, 0x80000000,
|
||||
0x00000080, 0x80000000,
|
||||
0x0000800a, 0x00000000,
|
||||
0x8000000a, 0x80000000,
|
||||
0x80008081, 0x80000000,
|
||||
0x00008080, 0x80000000,
|
||||
0x80000001, 0x00000000,
|
||||
0x80008008, 0x80000000,
|
||||
];
|
||||
|
||||
function makeRotLow(lo, hi, n)
|
||||
{
|
||||
if (n === 0 || n === 32) throw Error("unsupported");
|
||||
if ((n & 0x20) !== 0)
|
||||
{
|
||||
n &= ~0x20;
|
||||
var t = hi;
|
||||
hi = lo;
|
||||
lo = t;
|
||||
}
|
||||
var hir = hi + " >>> " + (32 - n);
|
||||
var los = lo + " << " + n;
|
||||
return los + " | " + hir;
|
||||
}
|
||||
|
||||
function makeRotHigh(lo, hi, n)
|
||||
{
|
||||
if (n === 0 || n === 32) throw Error("unsupported");
|
||||
if ((n & 0x20) !== 0)
|
||||
{
|
||||
n &= ~0x20;
|
||||
var t = hi;
|
||||
hi = lo;
|
||||
lo = t;
|
||||
}
|
||||
var his = hi + " << " + n;
|
||||
var lor = lo + " >>> " + (32 - n);
|
||||
return his + " | " + lor;
|
||||
}
|
||||
|
||||
function makeKeccak_f1600()
|
||||
{
|
||||
var format = function(n)
|
||||
{
|
||||
return n < 10 ? "0"+n : ""+n;
|
||||
};
|
||||
|
||||
var a = function(n, w)
|
||||
{
|
||||
return "a" + format(n) + (w !== 0?'h':'l');
|
||||
};
|
||||
|
||||
var b = function(n, w)
|
||||
{
|
||||
return "b" + format(n) + (w !== 0?'h':'l');
|
||||
};
|
||||
|
||||
var str = "";
|
||||
str += "function keccak_f1600(outState, outOffset, outSize, inState)\n";
|
||||
str += "{\n";
|
||||
|
||||
for (var i = 0; i < 25; ++i)
|
||||
{
|
||||
for (var w = 0; w <= 1; ++w)
|
||||
{
|
||||
str += "\tvar " + a(i,w) + " = inState["+(i<<1|w)+"]|0;\n";
|
||||
}
|
||||
}
|
||||
|
||||
for (var j = 0; j < 5; ++j)
|
||||
{
|
||||
str += "\tvar ";
|
||||
for (var i = 0; i < 5; ++i)
|
||||
{
|
||||
if (i !== 0)
|
||||
str += ", ";
|
||||
str += b(j*5+i,0) + ", " + b(j*5+i,1);
|
||||
}
|
||||
str += ";\n";
|
||||
}
|
||||
|
||||
str += "\tvar tl, th;\n";
|
||||
str += "\n";
|
||||
str += "\tfor (var r = 0; r < 48; r = (r+2)|0)\n";
|
||||
str += "\t{\n";
|
||||
|
||||
|
||||
// Theta
|
||||
str += "\t\t// Theta\n";
|
||||
for (var i = 0; i < 5; ++i)
|
||||
{
|
||||
for (var w = 0; w <= 1; ++w)
|
||||
{
|
||||
str += "\t\t" + b(i,w) + " = " + a(i,w) + " ^ " + a(i+5,w) + " ^ " + a(i+10,w) + " ^ " + a(i+15,w) + " ^ " + a(i+20,w) + ";\n";
|
||||
}
|
||||
}
|
||||
|
||||
for (var i = 0; i < 5; ++i)
|
||||
{
|
||||
var i4 = (i + 4) % 5;
|
||||
var i1 = (i + 1) % 5;
|
||||
str += "\t\ttl = " + b(i4,0) + " ^ (" + b(i1,0) + " << 1 | " + b(i1,1) + " >>> 31);\n";
|
||||
str += "\t\tth = " + b(i4,1) + " ^ (" + b(i1,1) + " << 1 | " + b(i1,0) + " >>> 31);\n";
|
||||
|
||||
for (var j = 0; j < 25; j = (j+5)|0)
|
||||
{
|
||||
str += "\t\t" + a((j+i),0) + " ^= tl;\n";
|
||||
str += "\t\t" + a((j+i),1) + " ^= th;\n";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Rho Pi
|
||||
str += "\n\t\t// Rho Pi\n";
|
||||
for (var w = 0; w <= 1; ++w)
|
||||
{
|
||||
str += "\t\t" + b(0,w) + " = " + a(0,w) + ";\n";
|
||||
}
|
||||
var opi = 1;
|
||||
for (var i = 0; i < 24; ++i)
|
||||
{
|
||||
var pi = Keccak_f1600_Pi[i];
|
||||
str += "\t\t" + b(pi,0) + " = " + makeRotLow(a(opi,0), a(opi,1), Keccak_f1600_Rho[i]) + ";\n";
|
||||
str += "\t\t" + b(pi,1) + " = " + makeRotHigh(a(opi,0), a(opi,1), Keccak_f1600_Rho[i]) + ";\n";
|
||||
opi = pi;
|
||||
}
|
||||
|
||||
// Chi
|
||||
str += "\n\t\t// Chi\n";
|
||||
for (var j = 0; j < 25; j += 5)
|
||||
{
|
||||
for (var i = 0; i < 5; ++i)
|
||||
{
|
||||
for (var w = 0; w <= 1; ++w)
|
||||
{
|
||||
str += "\t\t" + a(j+i,w) + " = " + b(j+i,w) + " ^ ~" + b(j+(i+1)%5,w) + " & " + b(j+(i+2)%5,w) + ";\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Iota
|
||||
str += "\n\t\t// Iota\n";
|
||||
for (var w = 0; w <= 1; ++w)
|
||||
{
|
||||
str += "\t\t" + a(0,w) + " ^= Keccak_f1600_RC[r|" + w + "];\n";
|
||||
}
|
||||
|
||||
|
||||
str += "\t}\n";
|
||||
|
||||
for (var i = 0; i < 25; ++i)
|
||||
{
|
||||
if (i == 4 || i == 8)
|
||||
{
|
||||
str += "\tif (outSize == " + i*2 + ")\n\t\treturn;\n";
|
||||
}
|
||||
for (var w = 0; w <= 1; ++w)
|
||||
{
|
||||
str += "\toutState[outOffset|"+(i<<1|w)+"] = " + a(i,w) + ";\n";
|
||||
}
|
||||
}
|
||||
str += "}\n";
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
console.log(makeKeccak_f1600());
|
Godeps/_workspace/src/github.com/ethereum/ethash/js/test.js (generated, vendored, 53 lines deleted)
@ -1,53 +0,0 @@
|
||||
// test.js
|
||||
// Tim Hughes <tim@twistedfury.com>
|
||||
|
||||
/*jslint node: true, shadow:true */
|
||||
"use strict";
|
||||
|
||||
var ethash = require('./ethash');
|
||||
var util = require('./util');
|
||||
var Keccak = require('./keccak');
|
||||
|
||||
// sanity check hash functions
|
||||
var src = util.stringToBytes("");
|
||||
if (util.bytesToHexString(new Keccak().digest(32, src)) != "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") throw Error("Keccak-256 failed");
|
||||
if (util.bytesToHexString(new Keccak().digest(64, src)) != "0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e") throw Error("Keccak-512 failed");
|
||||
|
||||
src = new Uint32Array(src.buffer);
|
||||
var dst = new Uint32Array(8);
|
||||
new Keccak().digestWords(dst, 0, dst.length, src, 0, src.length);
|
||||
if (util.wordsToHexString(dst) != "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") throw Error("Keccak-256 Fast failed");
|
||||
|
||||
var dst = new Uint32Array(16);
|
||||
new Keccak().digestWords(dst, 0, dst.length, src, 0, src.length);
|
||||
if (util.wordsToHexString(dst) != "0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e") throw Error("Keccak-512 Fast failed");
|
||||
|
||||
|
||||
// init params
|
||||
var ethashParams = ethash.defaultParams();
|
||||
//ethashParams.cacheRounds = 0;
|
||||
|
||||
// create hasher
|
||||
var seed = util.hexStringToBytes("9410b944535a83d9adf6bbdcc80e051f30676173c16ca0d32d6f1263fc246466")
|
||||
var startTime = new Date().getTime();
|
||||
var hasher = new ethash.Ethash(ethashParams, seed);
|
||||
console.log('Ethash startup took: '+(new Date().getTime() - startTime) + "ms");
|
||||
console.log('Ethash cache hash: ' + util.bytesToHexString(hasher.cacheDigest()));
|
||||
|
||||
var testHexString = "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470";
|
||||
if (testHexString != util.bytesToHexString(util.hexStringToBytes(testHexString)))
|
||||
throw Error("bytesToHexString or hexStringToBytes broken");
|
||||
|
||||
|
||||
var header = util.hexStringToBytes("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
|
||||
var nonce = util.hexStringToBytes("0000000000000000");
|
||||
var hash;
|
||||
|
||||
startTime = new Date().getTime();
|
||||
var trials = 10;
|
||||
for (var i = 0; i < trials; ++i)
|
||||
{
|
||||
hash = hasher.hash(header, nonce);
|
||||
}
|
||||
console.log("Light client hashes averaged: " + (new Date().getTime() - startTime)/trials + "ms");
|
||||
console.log("Hash = " + util.bytesToHexString(hash));
|
Godeps/_workspace/src/github.com/ethereum/ethash/js/util.js (generated, vendored, 100 lines deleted)
@ -1,100 +0,0 @@
|
||||
// util.js
|
||||
// Tim Hughes <tim@twistedfury.com>
|
||||
|
||||
/*jslint node: true, shadow:true */
|
||||
"use strict";
|
||||
|
||||
function nibbleToChar(nibble)
|
||||
{
|
||||
return String.fromCharCode((nibble < 10 ? 48 : 87) + nibble);
|
||||
}
|
||||
|
||||
function charToNibble(chr)
|
||||
{
|
||||
if (chr >= 48 && chr <= 57)
|
||||
{
|
||||
return chr - 48;
|
||||
}
|
||||
if (chr >= 65 && chr <= 70)
|
||||
{
|
||||
return chr - 65 + 10;
|
||||
}
|
||||
if (chr >= 97 && chr <= 102)
|
||||
{
|
||||
return chr - 97 + 10;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
function stringToBytes(str)
|
||||
{
|
||||
var bytes = new Uint8Array(str.length);
|
||||
for (var i = 0; i != str.length; ++i)
|
||||
{
|
||||
bytes[i] = str.charCodeAt(i);
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
function hexStringToBytes(str)
|
||||
{
|
||||
var bytes = new Uint8Array(str.length>>>1);
|
||||
for (var i = 0; i != bytes.length; ++i)
|
||||
{
|
||||
bytes[i] = charToNibble(str.charCodeAt(i<<1 | 0)) << 4;
|
||||
bytes[i] |= charToNibble(str.charCodeAt(i<<1 | 1));
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
function bytesToHexString(bytes)
|
||||
{
|
||||
var str = "";
|
||||
for (var i = 0; i != bytes.length; ++i)
|
||||
{
|
||||
str += nibbleToChar(bytes[i] >>> 4);
|
||||
str += nibbleToChar(bytes[i] & 0xf);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
function wordsToHexString(words)
|
||||
{
|
||||
return bytesToHexString(new Uint8Array(words.buffer));
|
||||
}
|
||||
|
||||
function uint32ToHexString(num)
|
||||
{
|
||||
var buf = new Uint8Array(4);
|
||||
buf[0] = (num >> 24) & 0xff;
|
||||
buf[1] = (num >> 16) & 0xff;
|
||||
buf[2] = (num >> 8) & 0xff;
|
||||
buf[3] = (num >> 0) & 0xff;
|
||||
return bytesToHexString(buf);
|
||||
}
|
||||
|
||||
function toWords(input)
|
||||
{
|
||||
if (input instanceof Uint32Array)
|
||||
{
|
||||
return input;
|
||||
}
|
||||
else if (input instanceof Uint8Array)
|
||||
{
|
||||
var tmp = new Uint8Array((input.length + 3) & ~3);
|
||||
tmp.set(input);
|
||||
return new Uint32Array(tmp.buffer);
|
||||
}
|
||||
else if (typeof input === typeof "")
|
||||
{
|
||||
return toWords(stringToBytes(input));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
exports.stringToBytes = stringToBytes;
|
||||
exports.hexStringToBytes = hexStringToBytes;
|
||||
exports.bytesToHexString = bytesToHexString;
|
||||
exports.wordsToHexString = wordsToHexString;
|
||||
exports.uint32ToHexString = uint32ToHexString;
|
||||
exports.toWords = toWords;
|
Godeps/_workspace/src/github.com/ethereum/ethash/src/benchmark/CMakeLists.txt (generated, vendored, 58 lines deleted)
@ -1,58 +0,0 @@
|
||||
include_directories(..)
|
||||
|
||||
set(CMAKE_BUILD_TYPE Release)
|
||||
|
||||
if (MSVC)
|
||||
add_definitions("/openmp")
|
||||
endif()
|
||||
|
||||
# enable C++11, should probably be a bit more specific about compiler
|
||||
if (NOT MSVC)
|
||||
SET(CMAKE_CXX_FLAGS "-std=c++11")
|
||||
endif()
|
||||
|
||||
if (NOT MPI_FOUND)
|
||||
find_package(MPI)
|
||||
endif()
|
||||
|
||||
if (NOT CRYPTOPP_FOUND)
|
||||
find_package(CryptoPP 5.6.2)
|
||||
endif()
|
||||
|
||||
if (CRYPTOPP_FOUND)
|
||||
add_definitions(-DWITH_CRYPTOPP)
|
||||
find_package (Threads REQUIRED)
|
||||
endif()
|
||||
|
||||
if (NOT OpenCL_FOUND)
|
||||
find_package(OpenCL)
|
||||
endif()
|
||||
if (OpenCL_FOUND)
|
||||
add_definitions(-DWITH_OPENCL)
|
||||
include_directories(${OpenCL_INCLUDE_DIRS})
|
||||
list(APPEND FILES ethash_cl_miner.cpp ethash_cl_miner.h)
|
||||
endif()
|
||||
|
||||
if (MPI_FOUND)
|
||||
include_directories(${MPI_INCLUDE_PATH})
|
||||
add_executable (Benchmark_MPI_FULL benchmark.cpp)
|
||||
target_link_libraries (Benchmark_MPI_FULL ${ETHHASH_LIBS} ${MPI_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
|
||||
SET_TARGET_PROPERTIES(Benchmark_MPI_FULL PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} ${MPI_COMPILE_FLAGS} -DFULL -DMPI")
|
||||
|
||||
add_executable (Benchmark_MPI_LIGHT benchmark.cpp)
|
||||
target_link_libraries (Benchmark_MPI_LIGHT ${ETHHASH_LIBS} ${MPI_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
|
||||
SET_TARGET_PROPERTIES(Benchmark_MPI_LIGHT PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} ${MPI_COMPILE_FLAGS} -DMPI")
|
||||
endif()
|
||||
|
||||
add_executable (Benchmark_FULL benchmark.cpp)
|
||||
target_link_libraries (Benchmark_FULL ${ETHHASH_LIBS} ${CMAKE_THREAD_LIBS_INIT})
|
||||
SET_TARGET_PROPERTIES(Benchmark_FULL PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} -DFULL")
|
||||
|
||||
add_executable (Benchmark_LIGHT benchmark.cpp)
|
||||
target_link_libraries (Benchmark_LIGHT ${ETHHASH_LIBS} ${CMAKE_THREAD_LIBS_INIT})
|
||||
|
||||
if (OpenCL_FOUND)
|
||||
add_executable (Benchmark_CL benchmark.cpp)
|
||||
target_link_libraries (Benchmark_CL ${ETHHASH_LIBS} ethash-cl ${CMAKE_THREAD_LIBS_INIT})
|
||||
SET_TARGET_PROPERTIES(Benchmark_CL PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} -DOPENCL")
|
||||
endif()
|
Godeps/_workspace/src/github.com/ethereum/ethash/src/benchmark/benchmark.cpp (generated, vendored, 278 lines deleted)
@ -1,278 +0,0 @@
|
||||
/*
|
||||
This file is part of cpp-ethereum.
|
||||
|
||||
cpp-ethereum is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
cpp-ethereum is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
/** @file benchmark.cpp
|
||||
* @author Tim Hughes <tim@twistedfury.com>
|
||||
* @date 2015
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <chrono>
|
||||
#include <libethash/ethash.h>
|
||||
#include <libethash/util.h>
|
||||
#ifdef OPENCL
|
||||
#include <libethash-cl/ethash_cl_miner.h>
|
||||
#endif
|
||||
#include <vector>
|
||||
#include <algorithm>
|
||||
|
||||
#ifdef WITH_CRYPTOPP
|
||||
#include <libethash/sha3_cryptopp.h>
|
||||
#include <string>
|
||||
|
||||
#else
|
||||
#include "libethash/sha3.h"
|
||||
#endif // WITH_CRYPTOPP
|
||||
|
||||
#undef min
|
||||
#undef max
|
||||
|
||||
using std::chrono::high_resolution_clock;
|
||||
|
||||
#if defined(OPENCL)
|
||||
const unsigned trials = 1024*1024*32;
|
||||
#elif defined(FULL)
|
||||
const unsigned trials = 1024*1024/8;
|
||||
#else
|
||||
const unsigned trials = 1024*1024/1024;
|
||||
#endif
|
||||
uint8_t g_hashes[1024*32];
|
||||
|
||||
static char nibbleToChar(unsigned nibble)
|
||||
{
|
||||
return (char) ((nibble >= 10 ? 'a'-10 : '0') + nibble);
|
||||
}
|
||||
|
||||
static uint8_t charToNibble(char chr)
|
||||
{
|
||||
if (chr >= '0' && chr <= '9')
|
||||
{
|
||||
return (uint8_t) (chr - '0');
|
||||
}
|
||||
if (chr >= 'a' && chr <= 'z')
|
||||
{
|
||||
return (uint8_t) (chr - 'a' + 10);
|
||||
}
|
||||
if (chr >= 'A' && chr <= 'Z')
|
||||
{
|
||||
return (uint8_t) (chr - 'A' + 10);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static std::vector<uint8_t> hexStringToBytes(char const* str)
|
||||
{
|
||||
std::vector<uint8_t> bytes(strlen(str) >> 1);
|
||||
for (unsigned i = 0; i != bytes.size(); ++i)
|
||||
{
|
||||
bytes[i] = charToNibble(str[i*2 | 0]) << 4;
|
||||
bytes[i] |= charToNibble(str[i*2 | 1]);
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
static std::string bytesToHexString(uint8_t const* bytes, unsigned size)
|
||||
{
|
||||
std::string str;
|
||||
for (unsigned i = 0; i != size; ++i)
|
||||
{
|
||||
str += nibbleToChar(bytes[i] >> 4);
|
||||
str += nibbleToChar(bytes[i] & 0xf);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
static std::string bytesToHexString(ethash_h256_t const *hash, unsigned size)
|
||||
{
|
||||
return bytesToHexString((uint8_t*)hash, size);
|
||||
}
|
||||
|
||||
extern "C" int main(void)
|
||||
{
|
||||
// params for ethash
|
||||
ethash_params params;
|
||||
ethash_params_init(&params, 0);
|
||||
//params.full_size = 262147 * 4096; // 1GBish;
|
||||
//params.full_size = 32771 * 4096; // 128MBish;
|
||||
//params.full_size = 8209 * 4096; // 8MBish;
|
||||
//params.cache_size = 8209*4096;
|
||||
//params.cache_size = 2053*4096;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t previous_hash;
|
||||
|
||||
memcpy(&seed, hexStringToBytes("9410b944535a83d9adf6bbdcc80e051f30676173c16ca0d32d6f1263fc246466").data(), 32);
|
||||
memcpy(&previous_hash, hexStringToBytes("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").data(), 32);
|
||||
|
||||
// allocate page aligned buffer for dataset
|
||||
#ifdef FULL
|
||||
void* full_mem_buf = malloc(params.full_size + 4095);
|
||||
void* full_mem = (void*)((uintptr_t(full_mem_buf) + 4095) & ~4095);
|
||||
#endif
|
||||
void* cache_mem_buf = malloc(params.cache_size + 63);
|
||||
void* cache_mem = (void*)((uintptr_t(cache_mem_buf) + 63) & ~63);
|
||||
|
||||
ethash_cache cache;
|
||||
cache.mem = cache_mem;
|
||||
|
||||
// compute cache or full data
|
||||
{
|
||||
auto startTime = high_resolution_clock::now();
|
||||
ethash_mkcache(&cache, &params, &seed);
|
||||
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
|
||||
|
||||
ethash_h256_t cache_hash;
|
||||
SHA3_256(&cache_hash, (uint8_t const*)cache_mem, params.cache_size);
|
||||
debugf("ethash_mkcache: %ums, sha3: %s\n", (unsigned)((time*1000)/CLOCKS_PER_SEC), bytesToHexString(&cache_hash, sizeof(cache_hash)).data());
|
||||
|
||||
// print a couple of test hashes
|
||||
{
|
||||
auto startTime = high_resolution_clock::now();
|
||||
ethash_return_value hash;
|
||||
ethash_light(&hash, &cache, &params, &previous_hash, 0);
|
||||
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
|
||||
debugf("ethash_light test: %ums, %s\n", (unsigned)time, bytesToHexString(&hash.result, 32).data());
|
||||
}
|
||||
|
||||
#ifdef FULL
|
||||
startTime = high_resolution_clock::now();
|
||||
ethash_compute_full_data(full_mem, &params, &cache);
|
||||
time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
|
||||
debugf("ethash_compute_full_data: %ums\n", (unsigned)time);
|
||||
#endif // FULL
|
||||
}
|
||||
|
||||
#ifdef OPENCL
|
||||
ethash_cl_miner miner;
|
||||
{
|
||||
auto startTime = high_resolution_clock::now();
|
||||
if (!miner.init(params, &seed))
|
||||
exit(-1);
|
||||
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
|
||||
debugf("ethash_cl_miner init: %ums\n", (unsigned)time);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
#ifdef FULL
|
||||
{
|
||||
auto startTime = high_resolution_clock::now();
|
||||
ethash_return_value hash;
|
||||
ethash_full(&hash, full_mem, &params, &previous_hash, 0);
|
||||
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
|
||||
debugf("ethash_full test: %uns\n", (unsigned)time);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef OPENCL
|
||||
// validate 1024 hashes against CPU
|
||||
miner.hash(g_hashes, (uint8_t*)&previous_hash, 0, 1024);
|
||||
for (unsigned i = 0; i != 1024; ++i)
|
||||
{
|
||||
ethash_return_value hash;
|
||||
ethash_light(&hash, &cache, &params, &previous_hash, i);
|
||||
if (memcmp(&hash.result, g_hashes + 32*i, 32) != 0)
|
||||
{
|
||||
debugf("nonce %u failed: %s %s\n", i, bytesToHexString(g_hashes + 32*i, 32).c_str(), bytesToHexString(&hash.result, 32).c_str());
|
||||
static unsigned c = 0;
|
||||
if (++c == 16)
|
||||
{
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ensure nothing else is going on
|
||||
miner.finish();
|
||||
#endif
|
||||
|
||||
auto startTime = high_resolution_clock::now();
|
||||
unsigned hash_count = trials;
|
||||
|
||||
#ifdef OPENCL
|
||||
{
|
||||
struct search_hook : ethash_cl_miner::search_hook
|
||||
{
|
||||
unsigned hash_count;
|
||||
std::vector<uint64_t> nonce_vec;
|
||||
|
||||
virtual bool found(uint64_t const* nonces, uint32_t count)
|
||||
{
|
||||
nonce_vec.insert(nonce_vec.end(), nonces, nonces + count);
|
||||
return false;
|
||||
}
|
||||
|
||||
virtual bool searched(uint64_t start_nonce, uint32_t count)
|
||||
{
|
||||
// do nothing
|
||||
hash_count += count;
|
||||
return hash_count >= trials;
|
||||
}
|
||||
};
|
||||
search_hook hook;
|
||||
hook.hash_count = 0;
|
||||
|
||||
miner.search((uint8_t*)&previous_hash, 0x000000ffffffffff, hook);
|
||||
|
||||
for (unsigned i = 0; i != hook.nonce_vec.size(); ++i)
|
||||
{
|
||||
uint64_t nonce = hook.nonce_vec[i];
|
||||
ethash_return_value hash;
|
||||
ethash_light(&hash, &cache, &params, &previous_hash, nonce);
|
||||
debugf("found: %.8x%.8x -> %s\n", unsigned(nonce>>32), unsigned(nonce), bytesToHexString(&hash.result, 32).c_str());
|
||||
}
|
||||
|
||||
hash_count = hook.hash_count;
|
||||
}
|
||||
#else
|
||||
{
|
||||
//#pragma omp parallel for
|
||||
for (int nonce = 0; nonce < trials; ++nonce)
|
||||
{
|
||||
ethash_return_value hash;
|
||||
#ifdef FULL
|
||||
ethash_full(&hash, full_mem, &params, &previous_hash, nonce);
|
||||
#else
|
||||
ethash_light(&hash, &cache, &params, &previous_hash, nonce);
|
||||
#endif // FULL
|
||||
}
|
||||
}
|
||||
#endif
|
||||
auto time = std::chrono::duration_cast<std::chrono::microseconds>(high_resolution_clock::now() - startTime).count();
|
||||
debugf("Search took: %ums\n", (unsigned)time/1000);
|
||||
|
||||
unsigned read_size = ETHASH_ACCESSES * ETHASH_MIX_BYTES;
|
||||
#if defined(OPENCL) || defined(FULL)
|
||||
debugf(
|
||||
"hashrate: %8.2f Mh/s, bw: %8.2f GB/s\n",
|
||||
(double)hash_count * (1000*1000)/time / (1000*1000),
|
||||
(double)hash_count*read_size * (1000*1000)/time / (1024*1024*1024)
|
||||
);
|
||||
#else
|
||||
debugf(
|
||||
"hashrate: %8.2f Kh/s, bw: %8.2f MB/s\n",
|
||||
(double)hash_count * (1000*1000)/time / (1000),
|
||||
(double)hash_count*read_size * (1000*1000)/time / (1024*1024)
|
||||
);
|
||||
#endif
|
||||
|
||||
free(cache_mem_buf);
|
||||
#ifdef FULL
|
||||
free(full_mem_buf);
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
Godeps/_workspace/src/github.com/ethereum/ethash/src/python/core.c (generated, vendored, 267 lines deleted)
@ -1,267 +0,0 @@
|
||||
#include <Python.h>
|
||||
#include <alloca.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include <time.h>
|
||||
#include "../libethash/ethash.h"
|
||||
#include "../libethash/internal.h"
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
#define PY_STRING_FORMAT "y#"
|
||||
#define PY_CONST_STRING_FORMAT "y"
|
||||
#else
|
||||
#define PY_STRING_FORMAT "s#"
|
||||
#define PY_CONST_STRING_FORMAT "s"
|
||||
#endif
|
||||
|
||||
#define MIX_WORDS (ETHASH_MIX_BYTES/4)
|
||||
|
||||
static PyObject *
|
||||
mkcache_bytes(PyObject *self, PyObject *args) {
|
||||
unsigned long block_number;
|
||||
unsigned long cache_size;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "k", &block_number))
|
||||
return 0;
|
||||
|
||||
ethash_light_t L = ethash_light_new(block_number);
|
||||
PyObject * val = Py_BuildValue(PY_STRING_FORMAT, L->cache, L->cache_size);
|
||||
free(L->cache);
|
||||
return val;
|
||||
}
|
||||
|
||||
/*
|
||||
static PyObject *
|
||||
calc_dataset_bytes(PyObject *self, PyObject *args) {
|
||||
char *cache_bytes;
|
||||
unsigned long full_size;
|
||||
int cache_size;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "k" PY_STRING_FORMAT, &full_size, &cache_bytes, &cache_size))
|
||||
return 0;
|
||||
|
||||
if (full_size % MIX_WORDS != 0) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "The size of data set must be a multiple of %i bytes (was %lu)", MIX_WORDS, full_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (cache_size % ETHASH_HASH_BYTES != 0) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "The size of the cache must be a multiple of %i bytes (was %i)", ETHASH_HASH_BYTES, cache_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
ethash_params params;
|
||||
params.cache_size = (size_t) cache_size;
|
||||
params.full_size = (size_t) full_size;
|
||||
ethash_cache cache;
|
||||
cache.mem = (void *) cache_bytes;
|
||||
void *mem = malloc(params.full_size);
|
||||
ethash_compute_full_data(mem, &params, &cache);
|
||||
PyObject * val = Py_BuildValue(PY_STRING_FORMAT, (char *) mem, full_size);
|
||||
free(mem);
|
||||
return val;
|
||||
}*/
|
||||
|
||||
// hashimoto_light(full_size, cache, header, nonce)
|
||||
static PyObject *
|
||||
hashimoto_light(PyObject *self, PyObject *args) {
|
||||
char *cache_bytes;
|
||||
char *header;
|
||||
unsigned long block_number;
|
||||
unsigned long long nonce;
|
||||
int cache_size, header_size;
|
||||
if (!PyArg_ParseTuple(args, "k" PY_STRING_FORMAT PY_STRING_FORMAT "K", &block_number, &cache_bytes, &cache_size, &header, &header_size, &nonce))
|
||||
return 0;
|
||||
if (header_size != 32) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "Seed must be 32 bytes long (was %i)", header_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
struct ethash_light *s;
|
||||
s = calloc(sizeof(*s), 1);
|
||||
s->cache = cache_bytes;
|
||||
s->cache_size = cache_size;
|
||||
s->block_number = block_number;
|
||||
struct ethash_h256 *h;
|
||||
h = calloc(sizeof(*h), 1);
|
||||
for (int i = 0; i < 32; i++) h->b[i] = header[i];
|
||||
struct ethash_return_value out = ethash_light_compute(s, *h, nonce);
|
||||
return Py_BuildValue("{" PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT "," PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT "}",
|
||||
"mix digest", &out.mix_hash, 32,
|
||||
"result", &out.result, 32);
|
||||
}
|
||||
/*
|
||||
// hashimoto_full(dataset, header, nonce)
|
||||
static PyObject *
|
||||
hashimoto_full(PyObject *self, PyObject *args) {
|
||||
char *full_bytes;
|
||||
char *header;
|
||||
unsigned long long nonce;
|
||||
int full_size, header_size;
|
||||
|
||||
if (!PyArg_ParseTuple(args, PY_STRING_FORMAT PY_STRING_FORMAT "K", &full_bytes, &full_size, &header, &header_size, &nonce))
|
||||
return 0;
|
||||
|
||||
if (full_size % MIX_WORDS != 0) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "The size of data set must be a multiple of %i bytes (was %i)", MIX_WORDS, full_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (header_size != 32) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "Header must be 32 bytes long (was %i)", header_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
ethash_return_value out;
|
||||
ethash_params params;
|
||||
params.full_size = (size_t) full_size;
|
||||
ethash_full(&out, (void *) full_bytes, &params, (ethash_h256_t *) header, nonce);
|
||||
return Py_BuildValue("{" PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT ", " PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT "}",
|
||||
"mix digest", &out.mix_hash, 32,
|
||||
"result", &out.result, 32);
|
||||
}
|
||||
|
||||
// mine(dataset_bytes, header, difficulty_bytes)
|
||||
static PyObject *
|
||||
mine(PyObject *self, PyObject *args) {
|
||||
char *full_bytes;
|
||||
char *header;
|
||||
char *difficulty;
|
||||
srand(time(0));
|
||||
uint64_t nonce = ((uint64_t) rand()) << 32 | rand();
|
||||
int full_size, header_size, difficulty_size;
|
||||
|
||||
if (!PyArg_ParseTuple(args, PY_STRING_FORMAT PY_STRING_FORMAT PY_STRING_FORMAT, &full_bytes, &full_size, &header, &header_size, &difficulty, &difficulty_size))
|
||||
return 0;
|
||||
|
||||
if (full_size % MIX_WORDS != 0) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "The size of data set must be a multiple of %i bytes (was %i)", MIX_WORDS, full_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (header_size != 32) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "Header must be 32 bytes long (was %i)", header_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (difficulty_size != 32) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "Difficulty must be an array of 32 bytes (only had %i)", difficulty_size);
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
|
||||
ethash_return_value out;
|
||||
ethash_params params;
|
||||
params.full_size = (size_t) full_size;
|
||||
|
||||
// TODO: Multi threading?
|
||||
do {
|
||||
ethash_full(&out, (void *) full_bytes, &params, (const ethash_h256_t *) header, nonce++);
|
||||
// TODO: disagrees with the spec https://github.com/ethereum/wiki/wiki/Ethash#mining
|
||||
} while (!ethash_check_difficulty(&out.result, (const ethash_h256_t *) difficulty));
|
||||
|
||||
return Py_BuildValue("{" PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT ", " PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT ", " PY_CONST_STRING_FORMAT ":K}",
|
||||
"mix digest", &out.mix_hash, 32,
|
||||
"result", &out.result, 32,
|
||||
"nonce", nonce);
|
||||
}
|
||||
*/
|
||||
|
||||
//get_seedhash(block_number)
|
||||
static PyObject *
|
||||
get_seedhash(PyObject *self, PyObject *args) {
|
||||
unsigned long block_number;
|
||||
if (!PyArg_ParseTuple(args, "k", &block_number))
|
||||
return 0;
|
||||
if (block_number >= ETHASH_EPOCH_LENGTH * 2048) {
|
||||
char error_message[1024];
|
||||
sprintf(error_message, "Block number must be less than %i (was %lu)", ETHASH_EPOCH_LENGTH * 2048, block_number);
|
||||
|
||||
PyErr_SetString(PyExc_ValueError, error_message);
|
||||
return 0;
|
||||
}
|
||||
ethash_h256_t seedhash = ethash_get_seedhash(block_number);
|
||||
return Py_BuildValue(PY_STRING_FORMAT, (char *) &seedhash, 32);
|
||||
}
|
||||
|
||||
static PyMethodDef PyethashMethods[] =
|
||||
{
|
||||
{"get_seedhash", get_seedhash, METH_VARARGS,
|
||||
"get_seedhash(block_number)\n\n"
|
||||
"Gets the seedhash for a block."},
|
||||
{"mkcache_bytes", mkcache_bytes, METH_VARARGS,
|
||||
"mkcache_bytes(block_number)\n\n"
|
||||
"Makes a byte array for the cache for given block number\n"},
|
||||
/*{"calc_dataset_bytes", calc_dataset_bytes, METH_VARARGS,
|
||||
"calc_dataset_bytes(full_size, cache_bytes)\n\n"
|
||||
"Makes a byte array for the dataset for a given size given cache bytes"},*/
|
||||
{"hashimoto_light", hashimoto_light, METH_VARARGS,
|
||||
"hashimoto_light(block_number, cache_bytes, header, nonce)\n\n"
|
||||
"Runs the hashimoto hashing function just using cache bytes. Takes an int (full_size), byte array (cache_bytes), another byte array (header), and an int (nonce). Returns an object containing the mix digest, and hash result."},
|
||||
/*{"hashimoto_full", hashimoto_full, METH_VARARGS,
|
||||
"hashimoto_full(dataset_bytes, header, nonce)\n\n"
|
||||
"Runs the hashimoto hashing function using the dataset bytes. Useful for testing. Returns an object containing the mix digest (byte array), and hash result (another byte array)."},
|
||||
{"mine", mine, METH_VARARGS,
|
||||
"mine(dataset_bytes, header, difficulty_bytes)\n\n"
|
||||
"Mine for an adequate header. Returns an object containing the mix digest (byte array), hash result (another byte array) and nonce (an int)."},*/
|
||||
{NULL, NULL, 0, NULL}
|
||||
};
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
static struct PyModuleDef PyethashModule = {
|
||||
PyModuleDef_HEAD_INIT,
|
||||
"pyethash",
|
||||
"...",
|
||||
-1,
|
||||
PyethashMethods
|
||||
};
|
||||
|
||||
PyMODINIT_FUNC PyInit_pyethash(void) {
|
||||
PyObject *module = PyModule_Create(&PyethashModule);
|
||||
// Following Spec: https://github.com/ethereum/wiki/wiki/Ethash#definitions
|
||||
PyModule_AddIntConstant(module, "REVISION", (long) ETHASH_REVISION);
|
||||
PyModule_AddIntConstant(module, "DATASET_BYTES_INIT", (long) ETHASH_DATASET_BYTES_INIT);
|
||||
PyModule_AddIntConstant(module, "DATASET_BYTES_GROWTH", (long) ETHASH_DATASET_BYTES_GROWTH);
|
||||
PyModule_AddIntConstant(module, "CACHE_BYTES_INIT", (long) ETHASH_CACHE_BYTES_INIT);
|
||||
PyModule_AddIntConstant(module, "CACHE_BYTES_GROWTH", (long) ETHASH_CACHE_BYTES_GROWTH);
|
||||
PyModule_AddIntConstant(module, "EPOCH_LENGTH", (long) ETHASH_EPOCH_LENGTH);
|
||||
PyModule_AddIntConstant(module, "MIX_BYTES", (long) ETHASH_MIX_BYTES);
|
||||
PyModule_AddIntConstant(module, "HASH_BYTES", (long) ETHASH_HASH_BYTES);
|
||||
PyModule_AddIntConstant(module, "DATASET_PARENTS", (long) ETHASH_DATASET_PARENTS);
|
||||
PyModule_AddIntConstant(module, "CACHE_ROUNDS", (long) ETHASH_CACHE_ROUNDS);
|
||||
PyModule_AddIntConstant(module, "ACCESSES", (long) ETHASH_ACCESSES);
|
||||
return module;
|
||||
}
|
||||
#else
|
||||
PyMODINIT_FUNC
|
||||
initpyethash(void) {
|
||||
PyObject *module = Py_InitModule("pyethash", PyethashMethods);
|
||||
// Following Spec: https://github.com/ethereum/wiki/wiki/Ethash#definitions
|
||||
PyModule_AddIntConstant(module, "REVISION", (long) ETHASH_REVISION);
|
||||
PyModule_AddIntConstant(module, "DATASET_BYTES_INIT", (long) ETHASH_DATASET_BYTES_INIT);
|
||||
PyModule_AddIntConstant(module, "DATASET_BYTES_GROWTH", (long) ETHASH_DATASET_BYTES_GROWTH);
|
||||
PyModule_AddIntConstant(module, "CACHE_BYTES_INIT", (long) ETHASH_CACHE_BYTES_INIT);
|
||||
PyModule_AddIntConstant(module, "CACHE_BYTES_GROWTH", (long) ETHASH_CACHE_BYTES_GROWTH);
|
||||
PyModule_AddIntConstant(module, "EPOCH_LENGTH", (long) ETHASH_EPOCH_LENGTH);
|
||||
PyModule_AddIntConstant(module, "MIX_BYTES", (long) ETHASH_MIX_BYTES);
|
||||
PyModule_AddIntConstant(module, "HASH_BYTES", (long) ETHASH_HASH_BYTES);
|
||||
PyModule_AddIntConstant(module, "DATASET_PARENTS", (long) ETHASH_DATASET_PARENTS);
|
||||
PyModule_AddIntConstant(module, "CACHE_ROUNDS", (long) ETHASH_CACHE_ROUNDS);
|
||||
PyModule_AddIntConstant(module, "ACCESSES", (long) ETHASH_ACCESSES);
|
||||
}
|
||||
#endif
|
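The bindings above are easiest to understand from the calling side. The following is a minimal, hedged sketch of how they might be exercised from Python, assuming the extension builds and imports as `pyethash` and that the docstring signatures above are accurate for this ethash revision; the `header` bytes and nonce are placeholders, not values from this repository.

```python
# Hedged sketch only: assumes the extension compiles as `pyethash` and that the
# docstring signatures above hold for this revision of ethash.
import pyethash

block_number = 0
seed = pyethash.get_seedhash(block_number)    # 32-byte seed for this block's epoch
cache = pyethash.mkcache_bytes(block_number)  # verification cache as a byte string

header = b"~" * 32   # placeholder 32-byte block header hash
nonce = 0
out = pyethash.hashimoto_light(block_number, cache, header, nonce)

# Keys follow the dict layout used by the (commented-out) mine() above and by
# the Python tests later in this diff.
print(out["mix digest"], out["result"])
```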
66
Godeps/_workspace/src/github.com/ethereum/ethash/test/c/CMakeLists.txt
generated
vendored
@@ -1,66 +0,0 @@
if (MSVC)
    if (NOT BOOST_ROOT)
        set (BOOST_ROOT "$ENV{BOOST_ROOT}")
    endif()
    set (CMAKE_PREFIX_PATH BOOST_ROOT)
endif()

IF( NOT Boost_FOUND )
    # use multithreaded boost libraries, with -mt suffix
    set(Boost_USE_MULTITHREADED ON)

    if (MSVC)
        # TODO handle other msvc versions or it will fail find them
        set(Boost_COMPILER -vc120)
        # use static boost libraries *.lib
        set(Boost_USE_STATIC_LIBS ON)
    elseif (APPLE)

        # use static boost libraries *.a
        set(Boost_USE_STATIC_LIBS ON)

    elseif (UNIX)
        # use dynamic boost libraries .dll
        set(Boost_USE_STATIC_LIBS OFF)

    endif()
    find_package(Boost 1.48.0 COMPONENTS unit_test_framework system filesystem)
ENDIF()

IF (Boost_FOUND)
    message(STATUS "boost header: ${Boost_INCLUDE_DIRS}")
    message(STATUS "boost libs : ${Boost_LIBRARIES}")

    include_directories( ${Boost_INCLUDE_DIR} )
    include_directories(../../src)

    link_directories(${Boost_LIBRARY_DIRS})
    file(GLOB HEADERS "*.h")
    if ((NOT MSVC) AND (NOT APPLE))
        ADD_DEFINITIONS(-DBOOST_TEST_DYN_LINK)
    endif()
    if (NOT CRYPTOPP_FOUND)
        find_package (CryptoPP)
    endif()

    if (CRYPTOPP_FOUND)
        add_definitions(-DWITH_CRYPTOPP)
    endif()

    if (NOT MSVC)
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ")
    endif()

    add_executable (Test "./test.cpp" ${HEADERS})
    target_link_libraries(Test ${ETHHASH_LIBS})
    target_link_libraries(Test ${Boost_FILESYSTEM_LIBRARIES})
    target_link_libraries(Test ${Boost_SYSTEM_LIBRARIES})
    target_link_libraries(Test ${Boost_UNIT_TEST_FRAMEWORK_LIBRARIES})

    if (CRYPTOPP_FOUND)
        TARGET_LINK_LIBRARIES(Test ${CRYPTOPP_LIBRARIES})
    endif()

    enable_testing ()
    add_test(NAME ethash COMMAND Test)
ENDIF()
669
Godeps/_workspace/src/github.com/ethereum/ethash/test/c/test.cpp
generated
vendored
@ -1,669 +0,0 @@
|
||||
#include <iomanip>
|
||||
#include <libethash/fnv.h>
|
||||
#include <libethash/ethash.h>
|
||||
#include <libethash/internal.h>
|
||||
#include <libethash/io.h>
|
||||
|
||||
#ifdef WITH_CRYPTOPP
|
||||
|
||||
#include <libethash/sha3_cryptopp.h>
|
||||
|
||||
#else
|
||||
#include <libethash/sha3.h>
|
||||
#endif // WITH_CRYPTOPP
|
||||
|
||||
#ifdef _WIN32
|
||||
#include <windows.h>
|
||||
#include <Shlobj.h>
|
||||
#endif
|
||||
|
||||
#define BOOST_TEST_MODULE Daggerhashimoto
|
||||
#define BOOST_TEST_MAIN
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <vector>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <boost/test/unit_test.hpp>
|
||||
|
||||
using namespace std;
|
||||
using byte = uint8_t;
|
||||
using bytes = std::vector<byte>;
|
||||
namespace fs = boost::filesystem;
|
||||
|
||||
// Just an alloca "wrapper" to silence uint64_t to size_t conversion warnings in windows
|
||||
// consider replacing alloca calls with something better though!
|
||||
#define our_alloca(param__) alloca((size_t)(param__))
|
||||
|
||||
|
||||
// some functions taken from eth::dev for convenience.
|
||||
std::string bytesToHexString(const uint8_t *str, const uint64_t s)
|
||||
{
|
||||
std::ostringstream ret;
|
||||
|
||||
for (size_t i = 0; i < s; ++i)
|
||||
ret << std::hex << std::setfill('0') << std::setw(2) << std::nouppercase << (int) str[i];
|
||||
|
||||
return ret.str();
|
||||
}
|
||||
|
||||
std::string blockhashToHexString(ethash_h256_t* _hash)
|
||||
{
|
||||
return bytesToHexString((uint8_t*)_hash, 32);
|
||||
}
|
||||
|
||||
int fromHex(char _i)
|
||||
{
|
||||
if (_i >= '0' && _i <= '9')
|
||||
return _i - '0';
|
||||
if (_i >= 'a' && _i <= 'f')
|
||||
return _i - 'a' + 10;
|
||||
if (_i >= 'A' && _i <= 'F')
|
||||
return _i - 'A' + 10;
|
||||
|
||||
BOOST_REQUIRE_MESSAGE(false, "should never get here");
|
||||
return -1;
|
||||
}
|
||||
|
||||
bytes hexStringToBytes(std::string const& _s)
|
||||
{
|
||||
unsigned s = (_s[0] == '0' && _s[1] == 'x') ? 2 : 0;
|
||||
std::vector<uint8_t> ret;
|
||||
ret.reserve((_s.size() - s + 1) / 2);
|
||||
|
||||
if (_s.size() % 2)
|
||||
try
|
||||
{
|
||||
ret.push_back(fromHex(_s[s++]));
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
ret.push_back(0);
|
||||
}
|
||||
for (unsigned i = s; i < _s.size(); i += 2)
|
||||
try
|
||||
{
|
||||
ret.push_back((byte)(fromHex(_s[i]) * 16 + fromHex(_s[i + 1])));
|
||||
}
|
||||
catch (...){
|
||||
ret.push_back(0);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
ethash_h256_t stringToBlockhash(std::string const& _s)
|
||||
{
|
||||
ethash_h256_t ret;
|
||||
bytes b = hexStringToBytes(_s);
|
||||
memcpy(&ret, b.data(), b.size());
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
|
||||
BOOST_AUTO_TEST_CASE(fnv_hash_check) {
|
||||
uint32_t x = 1235U;
|
||||
const uint32_t
|
||||
y = 9999999U,
|
||||
expected = (FNV_PRIME * x) ^y;
|
||||
|
||||
x = fnv_hash(x, y);
|
||||
|
||||
BOOST_REQUIRE_MESSAGE(x == expected,
|
||||
"\nexpected: " << expected << "\n"
|
||||
<< "actual: " << x << "\n");
|
||||
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(SHA256_check) {
|
||||
ethash_h256_t input;
|
||||
ethash_h256_t out;
|
||||
memcpy(&input, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
SHA3_256(&out, (uint8_t*)&input, 32);
|
||||
const std::string
|
||||
expected = "2b5ddf6f4d21c23de216f44d5e4bdc68e044b71897837ea74c83908be7037cd7",
|
||||
actual = bytesToHexString((uint8_t*)&out, 32);
|
||||
BOOST_REQUIRE_MESSAGE(expected == actual,
|
||||
"\nexpected: " << expected.c_str() << "\n"
|
||||
<< "actual: " << actual.c_str() << "\n");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(SHA512_check) {
|
||||
uint8_t input[64], out[64];
|
||||
memcpy(input, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 64);
|
||||
SHA3_512(out, input, 64);
|
||||
const std::string
|
||||
expected = "0be8a1d334b4655fe58c6b38789f984bb13225684e86b20517a55ab2386c7b61c306f25e0627c60064cecd6d80cd67a82b3890bd1289b7ceb473aad56a359405",
|
||||
actual = bytesToHexString(out, 64);
|
||||
BOOST_REQUIRE_MESSAGE(expected == actual,
|
||||
"\nexpected: " << expected.c_str() << "\n"
|
||||
<< "actual: " << actual.c_str() << "\n");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_swap_endian32) {
|
||||
uint32_t v32 = (uint32_t)0xBAADF00D;
|
||||
v32 = ethash_swap_u32(v32);
|
||||
BOOST_REQUIRE_EQUAL(v32, (uint32_t)0x0DF0ADBA);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_swap_endian64) {
|
||||
uint64_t v64 = (uint64_t)0xFEE1DEADDEADBEEF;
|
||||
v64 = ethash_swap_u64(v64);
|
||||
BOOST_REQUIRE_EQUAL(v64, (uint64_t)0xEFBEADDEADDEE1FE);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(ethash_params_init_genesis_check) {
|
||||
uint64_t full_size = ethash_get_datasize(0);
|
||||
uint64_t cache_size = ethash_get_cachesize(0);
|
||||
BOOST_REQUIRE_MESSAGE(full_size < ETHASH_DATASET_BYTES_INIT,
|
||||
"\nfull size: " << full_size << "\n"
|
||||
<< "should be less than or equal to: " << ETHASH_DATASET_BYTES_INIT << "\n");
|
||||
BOOST_REQUIRE_MESSAGE(full_size + 20 * ETHASH_MIX_BYTES >= ETHASH_DATASET_BYTES_INIT,
|
||||
"\nfull size + 20*MIX_BYTES: " << full_size + 20 * ETHASH_MIX_BYTES << "\n"
|
||||
<< "should be greater than or equal to: " << ETHASH_DATASET_BYTES_INIT << "\n");
|
||||
BOOST_REQUIRE_MESSAGE(cache_size < ETHASH_DATASET_BYTES_INIT / 32,
|
||||
"\ncache size: " << cache_size << "\n"
|
||||
<< "should be less than or equal to: " << ETHASH_DATASET_BYTES_INIT / 32 << "\n");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(ethash_params_init_genesis_calcifide_check) {
|
||||
uint64_t full_size = ethash_get_datasize(0);
|
||||
uint64_t cache_size = ethash_get_cachesize(0);
|
||||
const uint32_t expected_full_size = 1073739904;
|
||||
const uint32_t expected_cache_size = 16776896;
|
||||
BOOST_REQUIRE_MESSAGE(full_size == expected_full_size,
|
||||
"\nexpected: " << expected_cache_size << "\n"
|
||||
<< "actual: " << full_size << "\n");
|
||||
BOOST_REQUIRE_MESSAGE(cache_size == expected_cache_size,
|
||||
"\nexpected: " << expected_cache_size << "\n"
|
||||
<< "actual: " << cache_size << "\n");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(ethash_check_difficulty_check) {
|
||||
ethash_h256_t hash;
|
||||
ethash_h256_t target;
|
||||
memcpy(&hash, "11111111111111111111111111111111", 32);
|
||||
memcpy(&target, "22222222222222222222222222222222", 32);
|
||||
BOOST_REQUIRE_MESSAGE(
|
||||
ethash_check_difficulty(&hash, &target),
|
||||
"\nexpected \"" << std::string((char *) &hash, 32).c_str() << "\" to have the same or less difficulty than \"" << std::string((char *) &target, 32).c_str() << "\"\n");
|
||||
BOOST_REQUIRE_MESSAGE(
|
||||
ethash_check_difficulty(&hash, &hash), "");
|
||||
// "\nexpected \"" << hash << "\" to have the same or less difficulty than \"" << hash << "\"\n");
|
||||
memcpy(&target, "11111111111111111111111111111112", 32);
|
||||
BOOST_REQUIRE_MESSAGE(
|
||||
ethash_check_difficulty(&hash, &target), "");
|
||||
// "\nexpected \"" << hash << "\" to have the same or less difficulty than \"" << target << "\"\n");
|
||||
memcpy(&target, "11111111111111111111111111111110", 32);
|
||||
BOOST_REQUIRE_MESSAGE(
|
||||
!ethash_check_difficulty(&hash, &target), "");
|
||||
// "\nexpected \"" << hash << "\" to have more difficulty than \"" << target << "\"\n");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_ethash_io_mutable_name) {
|
||||
char mutable_name[DAG_MUTABLE_NAME_MAX_SIZE];
|
||||
// should have at least 8 bytes provided since this is what we test :)
|
||||
ethash_h256_t seed1 = ethash_h256_static_init(0, 10, 65, 255, 34, 55, 22, 8);
|
||||
ethash_io_mutable_name(1, &seed1, mutable_name);
|
||||
BOOST_REQUIRE_EQUAL(0, strcmp(mutable_name, "full-R1-000a41ff22371608"));
|
||||
ethash_h256_t seed2 = ethash_h256_static_init(0, 0, 0, 0, 0, 0, 0, 0);
|
||||
ethash_io_mutable_name(44, &seed2, mutable_name);
|
||||
BOOST_REQUIRE_EQUAL(0, strcmp(mutable_name, "full-R44-0000000000000000"));
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_ethash_dir_creation) {
|
||||
ethash_h256_t seedhash;
|
||||
FILE *f = NULL;
|
||||
memset(&seedhash, 0, 32);
|
||||
BOOST_REQUIRE_EQUAL(
|
||||
ETHASH_IO_MEMO_MISMATCH,
|
||||
ethash_io_prepare("./test_ethash_directory/", seedhash, &f, 64, false)
|
||||
);
|
||||
BOOST_REQUIRE(f);
|
||||
|
||||
// let's make sure that the directory was created
|
||||
BOOST_REQUIRE(fs::is_directory(fs::path("./test_ethash_directory/")));
|
||||
|
||||
// cleanup
|
||||
fclose(f);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_ethash_io_memo_file_match) {
|
||||
uint64_t full_size;
|
||||
uint64_t cache_size;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t hash;
|
||||
FILE* f;
|
||||
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
|
||||
cache_size = 1024;
|
||||
full_size = 1024 * 32;
|
||||
|
||||
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
|
||||
ethash_full_t full = ethash_full_new_internal(
|
||||
"./test_ethash_directory/",
|
||||
seed,
|
||||
full_size,
|
||||
light,
|
||||
NULL
|
||||
);
|
||||
BOOST_ASSERT(full);
|
||||
// let's make sure that the directory was created
|
||||
BOOST_REQUIRE(fs::is_directory(fs::path("./test_ethash_directory/")));
|
||||
// delete the full here so that memory is properly unmapped and FILE handler freed
|
||||
ethash_full_delete(full);
|
||||
// and check that we have a match when checking again
|
||||
BOOST_REQUIRE_EQUAL(
|
||||
ETHASH_IO_MEMO_MATCH,
|
||||
ethash_io_prepare("./test_ethash_directory/", seed, &f, full_size, false)
|
||||
);
|
||||
BOOST_REQUIRE(f);
|
||||
|
||||
// cleanup
|
||||
fclose(f);
|
||||
ethash_light_delete(light);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_ethash_io_memo_file_size_mismatch) {
|
||||
static const int blockn = 0;
|
||||
ethash_h256_t seedhash = ethash_get_seedhash(blockn);
|
||||
FILE *f = NULL;
|
||||
BOOST_REQUIRE_EQUAL(
|
||||
ETHASH_IO_MEMO_MISMATCH,
|
||||
ethash_io_prepare("./test_ethash_directory/", seedhash, &f, 64, false)
|
||||
);
|
||||
BOOST_REQUIRE(f);
|
||||
fclose(f);
|
||||
|
||||
// let's make sure that the directory was created
|
||||
BOOST_REQUIRE(fs::is_directory(fs::path("./test_ethash_directory/")));
|
||||
// and check that we get the size mismatch detected if we request a different size
|
||||
BOOST_REQUIRE_EQUAL(
|
||||
ETHASH_IO_MEMO_SIZE_MISMATCH,
|
||||
ethash_io_prepare("./test_ethash_directory/", seedhash, &f, 65, false)
|
||||
);
|
||||
|
||||
// cleanup
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_ethash_get_default_dirname) {
|
||||
char result[256];
|
||||
// this is really not an easy thing to test for in a unit test
|
||||
// TODO: Improve this test ...
|
||||
#ifdef _WIN32
|
||||
char homedir[256];
|
||||
BOOST_REQUIRE(SUCCEEDED(SHGetFolderPathA(NULL, CSIDL_PROFILE, NULL, 0, (CHAR*)homedir)));
|
||||
BOOST_REQUIRE(ethash_get_default_dirname(result, 256));
|
||||
std::string res = std::string(homedir) + std::string("\\AppData\\Local\\Ethash\\");
|
||||
#else
|
||||
char* homedir = getenv("HOME");
|
||||
BOOST_REQUIRE(ethash_get_default_dirname(result, 256));
|
||||
std::string res = std::string(homedir) + std::string("/.ethash/");
|
||||
#endif
|
||||
BOOST_CHECK_MESSAGE(strcmp(res.c_str(), result) == 0,
|
||||
"Expected \"" + res + "\" but got \"" + std::string(result) + "\""
|
||||
);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(light_and_full_client_checks) {
|
||||
uint64_t full_size;
|
||||
uint64_t cache_size;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t hash;
|
||||
ethash_h256_t difficulty;
|
||||
ethash_return_value_t light_out;
|
||||
ethash_return_value_t full_out;
|
||||
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
|
||||
// Set the difficulty
|
||||
ethash_h256_set(&difficulty, 0, 197);
|
||||
ethash_h256_set(&difficulty, 1, 90);
|
||||
for (int i = 2; i < 32; i++)
|
||||
ethash_h256_set(&difficulty, i, 255);
|
||||
|
||||
cache_size = 1024;
|
||||
full_size = 1024 * 32;
|
||||
|
||||
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
|
||||
ethash_full_t full = ethash_full_new_internal(
|
||||
"./test_ethash_directory/",
|
||||
seed,
|
||||
full_size,
|
||||
light,
|
||||
NULL
|
||||
);
|
||||
BOOST_ASSERT(full);
|
||||
{
|
||||
const std::string
|
||||
expected = "2da2b506f21070e1143d908e867962486d6b0a02e31d468fd5e3a7143aafa76a14201f63374314e2a6aaf84ad2eb57105dea3378378965a1b3873453bb2b78f9a8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995c259440b89fa3481c2c33171477c305c8e1e421f8d8f6d59585449d0034f3e421808d8da6bbd0b6378f567647cc6c4ba6c434592b198ad444e7284905b7c6adaf70bf43ec2daa7bd5e8951aa609ab472c124cf9eba3d38cff5091dc3f58409edcc386c743c3bd66f92408796ee1e82dd149eaefbf52b00ce33014a6eb3e50625413b072a58bc01da28262f42cbe4f87d4abc2bf287d15618405a1fe4e386fcdafbb171064bd99901d8f81dd6789396ce5e364ac944bbbd75a7827291c70b42d26385910cd53ca535ab29433dd5c5714d26e0dce95514c5ef866329c12e958097e84462197c2b32087849dab33e88b11da61d52f9dbc0b92cc61f742c07dbbf751c49d7678624ee60dfbe62e5e8c47a03d8247643f3d16ad8c8e663953bcda1f59d7e2d4a9bf0768e789432212621967a8f41121ad1df6ae1fa78782530695414c6213942865b2730375019105cae91a4c17a558d4b63059661d9f108362143107babe0b848de412e4da59168cce82bfbff3c99e022dd6ac1e559db991f2e3f7bb910cefd173e65ed00a8d5d416534e2c8416ff23977dbf3eb7180b75c71580d08ce95efeb9b0afe904ea12285a392aff0c8561ff79fca67f694a62b9e52377485c57cc3598d84cac0a9d27960de0cc31ff9bbfe455acaa62c8aa5d2cce96f345da9afe843d258a99c4eaf3650fc62efd81c7b81cd0d534d2d71eeda7a6e315d540b4473c80f8730037dc2ae3e47b986240cfc65ccc565f0d8cde0bc68a57e39a271dda57440b3598bee19f799611d25731a96b5dbbbefdff6f4f656161462633030d62560ea4e9c161cf78fc96a2ca5aaa32453a6c5dea206f766244e8c9d9a8dc61185ce37f1fc804459c5f07434f8ecb34141b8dcae7eae704c950b55556c5f40140c3714b45eddb02637513268778cbf937a33e4e33183685f9deb31ef54e90161e76d969587dd782eaa94e289420e7c2ee908517f5893a26fdb5873d68f92d118d4bcf98d7a4916794d6ab290045e30f9ea00ca547c584b8482b0331ba1539a0f2714fddc3a0b06b0cfbb6a607b8339c39bcfd6640b1f653e9d70ef6c985b",
|
||||
actual = bytesToHexString((uint8_t const *) light->cache, cache_size);
|
||||
|
||||
BOOST_REQUIRE_MESSAGE(expected == actual,
|
||||
"\nexpected: " << expected.c_str() << "\n"
|
||||
<< "actual: " << actual.c_str() << "\n");
|
||||
}
|
||||
{
|
||||
node node;
|
||||
ethash_calculate_dag_item(&node, 0, light);
|
||||
const std::string
|
||||
actual = bytesToHexString((uint8_t const *) &node, sizeof(node)),
|
||||
expected = "b1698f829f90b35455804e5185d78f549fcb1bdce2bee006d4d7e68eb154b596be1427769eb1c3c3e93180c760af75f81d1023da6a0ffbe321c153a7c0103597";
|
||||
BOOST_REQUIRE_MESSAGE(actual == expected,
|
||||
"\n" << "expected: " << expected.c_str() << "\n"
|
||||
<< "actual: " << actual.c_str() << "\n");
|
||||
}
|
||||
{
|
||||
for (int i = 0; i < full_size / sizeof(node); ++i) {
|
||||
for (uint32_t j = 0; j < 32; ++j) {
|
||||
node expected_node;
|
||||
ethash_calculate_dag_item(&expected_node, j, light);
|
||||
const std::string
|
||||
actual = bytesToHexString((uint8_t const *) &(full->data[j]), sizeof(node)),
|
||||
expected = bytesToHexString((uint8_t const *) &expected_node, sizeof(node));
|
||||
BOOST_REQUIRE_MESSAGE(actual == expected,
|
||||
"\ni: " << j << "\n"
|
||||
<< "expected: " << expected.c_str() << "\n"
|
||||
<< "actual: " << actual.c_str() << "\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
{
|
||||
uint64_t nonce = 0x7c7c597c;
|
||||
full_out = ethash_full_compute(full, hash, nonce);
|
||||
BOOST_REQUIRE(full_out.success);
|
||||
light_out = ethash_light_compute_internal(light, full_size, hash, nonce);
|
||||
BOOST_REQUIRE(light_out.success);
|
||||
const std::string
|
||||
light_result_string = blockhashToHexString(&light_out.result),
|
||||
full_result_string = blockhashToHexString(&full_out.result);
|
||||
BOOST_REQUIRE_MESSAGE(light_result_string == full_result_string,
|
||||
"\nlight result: " << light_result_string.c_str() << "\n"
|
||||
<< "full result: " << full_result_string.c_str() << "\n");
|
||||
const std::string
|
||||
light_mix_hash_string = blockhashToHexString(&light_out.mix_hash),
|
||||
full_mix_hash_string = blockhashToHexString(&full_out.mix_hash);
|
||||
BOOST_REQUIRE_MESSAGE(full_mix_hash_string == light_mix_hash_string,
|
||||
"\nlight mix hash: " << light_mix_hash_string.c_str() << "\n"
|
||||
<< "full mix hash: " << full_mix_hash_string.c_str() << "\n");
|
||||
ethash_h256_t check_hash;
|
||||
ethash_quick_hash(&check_hash, &hash, nonce, &full_out.mix_hash);
|
||||
const std::string check_hash_string = blockhashToHexString(&check_hash);
|
||||
BOOST_REQUIRE_MESSAGE(check_hash_string == full_result_string,
|
||||
"\ncheck hash string: " << check_hash_string.c_str() << "\n"
|
||||
<< "full result: " << full_result_string.c_str() << "\n");
|
||||
}
|
||||
{
|
||||
full_out = ethash_full_compute(full, hash, 5);
|
||||
BOOST_REQUIRE(full_out.success);
|
||||
std::string
|
||||
light_result_string = blockhashToHexString(&light_out.result),
|
||||
full_result_string = blockhashToHexString(&full_out.result);
|
||||
BOOST_REQUIRE_MESSAGE(light_result_string != full_result_string,
|
||||
"\nlight result and full result should differ: " << light_result_string.c_str() << "\n");
|
||||
|
||||
light_out = ethash_light_compute_internal(light, full_size, hash, 5);
|
||||
BOOST_REQUIRE(light_out.success);
|
||||
light_result_string = blockhashToHexString(&light_out.result);
|
||||
BOOST_REQUIRE_MESSAGE(light_result_string == full_result_string,
|
||||
"\nlight result and full result should be the same\n"
|
||||
<< "light result: " << light_result_string.c_str() << "\n"
|
||||
<< "full result: " << full_result_string.c_str() << "\n");
|
||||
std::string
|
||||
light_mix_hash_string = blockhashToHexString(&light_out.mix_hash),
|
||||
full_mix_hash_string = blockhashToHexString(&full_out.mix_hash);
|
||||
BOOST_REQUIRE_MESSAGE(full_mix_hash_string == light_mix_hash_string,
|
||||
"\nlight mix hash: " << light_mix_hash_string.c_str() << "\n"
|
||||
<< "full mix hash: " << full_mix_hash_string.c_str() << "\n");
|
||||
BOOST_REQUIRE_MESSAGE(ethash_check_difficulty(&full_out.result, &difficulty),
|
||||
"ethash_check_difficulty failed"
|
||||
);
|
||||
BOOST_REQUIRE_MESSAGE(ethash_quick_check_difficulty(&hash, 5U, &full_out.mix_hash, &difficulty),
|
||||
"ethash_quick_check_difficulty failed"
|
||||
);
|
||||
}
|
||||
ethash_light_delete(light);
|
||||
ethash_full_delete(full);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(ethash_full_new_when_dag_exists_with_wrong_size) {
|
||||
uint64_t full_size;
|
||||
uint64_t cache_size;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t hash;
|
||||
ethash_return_value_t full_out;
|
||||
ethash_return_value_t light_out;
|
||||
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
|
||||
cache_size = 1024;
|
||||
full_size = 1024 * 32;
|
||||
|
||||
// first make a DAG file of "wrong size"
|
||||
FILE *f;
|
||||
BOOST_REQUIRE_EQUAL(
|
||||
ETHASH_IO_MEMO_MISMATCH,
|
||||
ethash_io_prepare("./test_ethash_directory/", seed, &f, 64, false)
|
||||
);
|
||||
fclose(f);
|
||||
|
||||
// then create new DAG, which should detect the wrong size and force create a new file
|
||||
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
|
||||
BOOST_ASSERT(light);
|
||||
ethash_full_t full = ethash_full_new_internal(
|
||||
"./test_ethash_directory/",
|
||||
seed,
|
||||
full_size,
|
||||
light,
|
||||
NULL
|
||||
);
|
||||
BOOST_ASSERT(full);
|
||||
{
|
||||
uint64_t nonce = 0x7c7c597c;
|
||||
full_out = ethash_full_compute(full, hash, nonce);
|
||||
BOOST_REQUIRE(full_out.success);
|
||||
light_out = ethash_light_compute_internal(light, full_size, hash, nonce);
|
||||
BOOST_REQUIRE(light_out.success);
|
||||
const std::string
|
||||
light_result_string = blockhashToHexString(&light_out.result),
|
||||
full_result_string = blockhashToHexString(&full_out.result);
|
||||
BOOST_REQUIRE_MESSAGE(light_result_string == full_result_string,
|
||||
"\nlight result: " << light_result_string.c_str() << "\n"
|
||||
<< "full result: " << full_result_string.c_str() << "\n");
|
||||
const std::string
|
||||
light_mix_hash_string = blockhashToHexString(&light_out.mix_hash),
|
||||
full_mix_hash_string = blockhashToHexString(&full_out.mix_hash);
|
||||
BOOST_REQUIRE_MESSAGE(full_mix_hash_string == light_mix_hash_string,
|
||||
"\nlight mix hash: " << light_mix_hash_string.c_str() << "\n"
|
||||
<< "full mix hash: " << full_mix_hash_string.c_str() << "\n");
|
||||
ethash_h256_t check_hash;
|
||||
ethash_quick_hash(&check_hash, &hash, nonce, &full_out.mix_hash);
|
||||
const std::string check_hash_string = blockhashToHexString(&check_hash);
|
||||
BOOST_REQUIRE_MESSAGE(check_hash_string == full_result_string,
|
||||
"\ncheck hash string: " << check_hash_string.c_str() << "\n"
|
||||
<< "full result: " << full_result_string.c_str() << "\n");
|
||||
}
|
||||
|
||||
ethash_light_delete(light);
|
||||
ethash_full_delete(full);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
static bool g_executed = false;
|
||||
static unsigned g_prev_progress = 0;
|
||||
static int test_full_callback(unsigned _progress)
|
||||
{
|
||||
g_executed = true;
|
||||
BOOST_CHECK(_progress >= g_prev_progress);
|
||||
g_prev_progress = _progress;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int test_full_callback_that_fails(unsigned _progress)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int test_full_callback_create_incomplete_dag(unsigned _progress)
|
||||
{
|
||||
if (_progress >= 30) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(full_client_callback) {
|
||||
uint64_t full_size;
|
||||
uint64_t cache_size;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t hash;
|
||||
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
|
||||
cache_size = 1024;
|
||||
full_size = 1024 * 32;
|
||||
|
||||
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
|
||||
ethash_full_t full = ethash_full_new_internal(
|
||||
"./test_ethash_directory/",
|
||||
seed,
|
||||
full_size,
|
||||
light,
|
||||
test_full_callback
|
||||
);
|
||||
BOOST_ASSERT(full);
|
||||
BOOST_CHECK(g_executed);
|
||||
BOOST_REQUIRE_EQUAL(g_prev_progress, 100);
|
||||
|
||||
ethash_full_delete(full);
|
||||
ethash_light_delete(light);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(failing_full_client_callback) {
|
||||
uint64_t full_size;
|
||||
uint64_t cache_size;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t hash;
|
||||
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
|
||||
cache_size = 1024;
|
||||
full_size = 1024 * 32;
|
||||
|
||||
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
|
||||
ethash_full_t full = ethash_full_new_internal(
|
||||
"./test_ethash_directory/",
|
||||
seed,
|
||||
full_size,
|
||||
light,
|
||||
test_full_callback_that_fails
|
||||
);
|
||||
BOOST_ASSERT(!full);
|
||||
ethash_light_delete(light);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_incomplete_dag_file) {
|
||||
uint64_t full_size;
|
||||
uint64_t cache_size;
|
||||
ethash_h256_t seed;
|
||||
ethash_h256_t hash;
|
||||
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
|
||||
|
||||
cache_size = 1024;
|
||||
full_size = 1024 * 32;
|
||||
|
||||
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
|
||||
// create a full but stop at 30%, so no magic number is written
|
||||
ethash_full_t full = ethash_full_new_internal(
|
||||
"./test_ethash_directory/",
|
||||
seed,
|
||||
full_size,
|
||||
light,
|
||||
test_full_callback_create_incomplete_dag
|
||||
);
|
||||
BOOST_ASSERT(!full);
|
||||
FILE *f = NULL;
|
||||
// confirm that we get a size_mismatch because the magic number is missing
|
||||
BOOST_REQUIRE_EQUAL(
|
||||
ETHASH_IO_MEMO_SIZE_MISMATCH,
|
||||
ethash_io_prepare("./test_ethash_directory/", seed, &f, full_size, false)
|
||||
);
|
||||
ethash_light_delete(light);
|
||||
fs::remove_all("./test_ethash_directory/");
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_block22_verification) {
|
||||
// from POC-9 testnet, epoch 0
|
||||
ethash_light_t light = ethash_light_new(22);
|
||||
ethash_h256_t seedhash = stringToBlockhash("372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d");
|
||||
BOOST_ASSERT(light);
|
||||
ethash_return_value_t ret = ethash_light_compute(
|
||||
light,
|
||||
seedhash,
|
||||
0x495732e0ed7a801cU
|
||||
);
|
||||
BOOST_REQUIRE_EQUAL(blockhashToHexString(&ret.result), "00000b184f1fdd88bfd94c86c39e65db0c36144d5e43f745f722196e730cb614");
|
||||
ethash_h256_t difficulty = ethash_h256_static_init(0x2, 0x5, 0x40);
|
||||
BOOST_REQUIRE(ethash_check_difficulty(&ret.result, &difficulty));
|
||||
ethash_light_delete(light);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_block30001_verification) {
|
||||
// from POC-9 testnet, epoch 1
|
||||
ethash_light_t light = ethash_light_new(30001);
|
||||
ethash_h256_t seedhash = stringToBlockhash("7e44356ee3441623bc72a683fd3708fdf75e971bbe294f33e539eedad4b92b34");
|
||||
BOOST_ASSERT(light);
|
||||
ethash_return_value_t ret = ethash_light_compute(
|
||||
light,
|
||||
seedhash,
|
||||
0x318df1c8adef7e5eU
|
||||
);
|
||||
ethash_h256_t difficulty = ethash_h256_static_init(0x17, 0x62, 0xff);
|
||||
BOOST_REQUIRE(ethash_check_difficulty(&ret.result, &difficulty));
|
||||
ethash_light_delete(light);
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(test_block60000_verification) {
|
||||
// from POC-9 testnet, epoch 2
|
||||
ethash_light_t light = ethash_light_new(60000);
|
||||
ethash_h256_t seedhash = stringToBlockhash("5fc898f16035bf5ac9c6d9077ae1e3d5fc1ecc3c9fd5bee8bb00e810fdacbaa0");
|
||||
BOOST_ASSERT(light);
|
||||
ethash_return_value_t ret = ethash_light_compute(
|
||||
light,
|
||||
seedhash,
|
||||
0x50377003e5d830caU
|
||||
);
|
||||
ethash_h256_t difficulty = ethash_h256_static_init(0x25, 0xa6, 0x1e);
|
||||
BOOST_REQUIRE(ethash_check_difficulty(&ret.result, &difficulty));
|
||||
ethash_light_delete(light);
|
||||
}
|
||||
|
||||
// Test of Full DAG creation with the minimal ethash.h API.
|
||||
// Commented out since travis tests would take too much time.
|
||||
// Uncomment and run on your own machine if you want to confirm
|
||||
// it works fine.
|
||||
#if 0
|
||||
static int progress_cb(unsigned _progress)
|
||||
{
|
||||
printf("CREATING DAG. PROGRESS: %u\n", _progress);
|
||||
fflush(stdout);
|
||||
return 0;
|
||||
}
|
||||
|
||||
BOOST_AUTO_TEST_CASE(full_dag_test) {
|
||||
ethash_light_t light = ethash_light_new(55);
|
||||
BOOST_ASSERT(light);
|
||||
ethash_full_t full = ethash_full_new(light, progress_cb);
|
||||
BOOST_ASSERT(full);
|
||||
ethash_light_delete(light);
|
||||
ethash_full_delete(full);
|
||||
}
|
||||
#endif
|
32
Godeps/_workspace/src/github.com/ethereum/ethash/test/c/test.sh
generated
vendored
@@ -1,32 +0,0 @@
#!/bin/bash

# Strict mode
set -e

VALGRIND_ARGS="--tool=memcheck"
VALGRIND_ARGS+=" --leak-check=yes"
VALGRIND_ARGS+=" --track-origins=yes"
VALGRIND_ARGS+=" --show-reachable=yes"
VALGRIND_ARGS+=" --num-callers=20"
VALGRIND_ARGS+=" --track-fds=yes"

SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    SOURCE="$(readlink "$SOURCE")"
    [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
TEST_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

rm -rf $TEST_DIR/build
mkdir -p $TEST_DIR/build
cd $TEST_DIR/build ;
cmake ../../.. > /dev/null
make Test
./test/c/Test

# If we have valgrind also run memory check tests
if hash valgrind 2>/dev/null; then
    echo "======== Running tests under valgrind ========";
    cd $TEST_DIR/build/ && valgrind $VALGRIND_ARGS ./test/c/Test
fi
1
Godeps/_workspace/src/github.com/ethereum/ethash/test/python/.gitignore
generated
vendored
@@ -1 +0,0 @@
python-virtual-env/
3
Godeps/_workspace/src/github.com/ethereum/ethash/test/python/requirements.txt
generated
vendored
@@ -1,3 +0,0 @@
pyethereum==0.7.522
nose==1.3.4
pysha3==0.3
30
Godeps/_workspace/src/github.com/ethereum/ethash/test/python/test.sh
generated
vendored
@@ -1,30 +0,0 @@
#!/bin/bash

# Strict mode
set -e

if [ -x "$(which virtualenv2)" ] ; then
    VIRTUALENV_EXEC=virtualenv2
elif [ -x "$(which virtualenv)" ] ; then
    VIRTUALENV_EXEC=virtualenv
else
    echo "Could not find a suitable version of virtualenv"
    false
fi

SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    SOURCE="$(readlink "$SOURCE")"
    [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
TEST_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

[ -d $TEST_DIR/python-virtual-env ] || $VIRTUALENV_EXEC --system-site-packages $TEST_DIR/python-virtual-env
source $TEST_DIR/python-virtual-env/bin/activate
pip install -r $TEST_DIR/requirements.txt > /dev/null
# force installation of nose in the virtualenv even if it already exists in the user's system
pip install nose -I
pip install --upgrade --no-deps --force-reinstall -e $TEST_DIR/../..
cd $TEST_DIR
nosetests --with-doctest -v --nocapture
105
Godeps/_workspace/src/github.com/ethereum/ethash/test/python/test_pyethash.py
generated
vendored
@ -1,105 +0,0 @@
|
||||
import pyethash
|
||||
from random import randint
|
||||
|
||||
def test_get_cache_size_not_None():
|
||||
for _ in range(100):
|
||||
block_num = randint(0,12456789)
|
||||
out = pyethash.get_cache_size(block_num)
|
||||
assert out != None
|
||||
|
||||
def test_get_full_size_not_None():
|
||||
for _ in range(100):
|
||||
block_num = randint(0,12456789)
|
||||
out = pyethash.get_full_size(block_num)
|
||||
assert out != None
|
||||
|
||||
def test_get_cache_size_based_on_EPOCH():
|
||||
for _ in range(100):
|
||||
block_num = randint(0,12456789)
|
||||
out1 = pyethash.get_cache_size(block_num)
|
||||
out2 = pyethash.get_cache_size((block_num // pyethash.EPOCH_LENGTH) * pyethash.EPOCH_LENGTH)
|
||||
assert out1 == out2
|
||||
|
||||
def test_get_full_size_based_on_EPOCH():
|
||||
for _ in range(100):
|
||||
block_num = randint(0,12456789)
|
||||
out1 = pyethash.get_full_size(block_num)
|
||||
out2 = pyethash.get_full_size((block_num // pyethash.EPOCH_LENGTH) * pyethash.EPOCH_LENGTH)
|
||||
assert out1 == out2
|
||||
|
||||
# See light_and_full_client_checks in test.cpp
|
||||
def test_mkcache_is_as_expected():
|
||||
actual = pyethash.mkcache_bytes(
|
||||
1024,
|
||||
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~").encode('hex')
|
||||
expected = "2da2b506f21070e1143d908e867962486d6b0a02e31d468fd5e3a7143aafa76a14201f63374314e2a6aaf84ad2eb57105dea3378378965a1b3873453bb2b78f9a8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995c259440b89fa3481c2c33171477c305c8e1e421f8d8f6d59585449d0034f3e421808d8da6bbd0b6378f567647cc6c4ba6c434592b198ad444e7284905b7c6adaf70bf43ec2daa7bd5e8951aa609ab472c124cf9eba3d38cff5091dc3f58409edcc386c743c3bd66f92408796ee1e82dd149eaefbf52b00ce33014a6eb3e50625413b072a58bc01da28262f42cbe4f87d4abc2bf287d15618405a1fe4e386fcdafbb171064bd99901d8f81dd6789396ce5e364ac944bbbd75a7827291c70b42d26385910cd53ca535ab29433dd5c5714d26e0dce95514c5ef866329c12e958097e84462197c2b32087849dab33e88b11da61d52f9dbc0b92cc61f742c07dbbf751c49d7678624ee60dfbe62e5e8c47a03d8247643f3d16ad8c8e663953bcda1f59d7e2d4a9bf0768e789432212621967a8f41121ad1df6ae1fa78782530695414c6213942865b2730375019105cae91a4c17a558d4b63059661d9f108362143107babe0b848de412e4da59168cce82bfbff3c99e022dd6ac1e559db991f2e3f7bb910cefd173e65ed00a8d5d416534e2c8416ff23977dbf3eb7180b75c71580d08ce95efeb9b0afe904ea12285a392aff0c8561ff79fca67f694a62b9e52377485c57cc3598d84cac0a9d27960de0cc31ff9bbfe455acaa62c8aa5d2cce96f345da9afe843d258a99c4eaf3650fc62efd81c7b81cd0d534d2d71eeda7a6e315d540b4473c80f8730037dc2ae3e47b986240cfc65ccc565f0d8cde0bc68a57e39a271dda57440b3598bee19f799611d25731a96b5dbbbefdff6f4f656161462633030d62560ea4e9c161cf78fc96a2ca5aaa32453a6c5dea206f766244e8c9d9a8dc61185ce37f1fc804459c5f07434f8ecb34141b8dcae7eae704c950b55556c5f40140c3714b45eddb02637513268778cbf937a33e4e33183685f9deb31ef54e90161e76d969587dd782eaa94e289420e7c2ee908517f5893a26fdb5873d68f92d118d4bcf98d7a4916794d6ab290045e30f9ea00ca547c584b8482b0331ba1539a0f2714fddc3a0b06b0cfbb6a607b8339c39bcfd6640b1f653e9d70ef6c985b"
|
||||
assert actual == expected
|
||||
|
||||
def test_calc_dataset_is_not_None():
|
||||
cache = pyethash.mkcache_bytes(
|
||||
1024,
|
||||
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
|
||||
assert pyethash.calc_dataset_bytes(1024 * 32, cache) != None
|
||||
|
||||
def test_light_and_full_agree():
|
||||
cache = pyethash.mkcache_bytes(
|
||||
1024,
|
||||
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
|
||||
full_size = 1024 * 32
|
||||
header = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
|
||||
light_result = pyethash.hashimoto_light(full_size, cache, header, 0)
|
||||
dataset = pyethash.calc_dataset_bytes(full_size, cache)
|
||||
full_result = pyethash.hashimoto_full(dataset, header, 0)
|
||||
assert light_result["mix digest"] != None
|
||||
assert len(light_result["mix digest"]) == 32
|
||||
assert light_result["mix digest"] == full_result["mix digest"]
|
||||
assert light_result["result"] != None
|
||||
assert len(light_result["result"]) == 32
|
||||
assert light_result["result"] == full_result["result"]
|
||||
|
||||
def int_to_bytes(i):
|
||||
b = []
|
||||
for _ in range(32):
|
||||
b.append(chr(i & 0xff))
|
||||
i >>= 8
|
||||
b.reverse()
|
||||
return "".join(b)
|
||||
|
||||
def test_mining_basic():
|
||||
easy_difficulty = int_to_bytes(2**256 - 1)
|
||||
assert easy_difficulty.encode('hex') == 'f' * 64
|
||||
cache = pyethash.mkcache_bytes(
|
||||
1024,
|
||||
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
|
||||
full_size = 1024 * 32
|
||||
header = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
|
||||
dataset = pyethash.calc_dataset_bytes(full_size, cache)
|
||||
# Check type of outputs
|
||||
assert type(pyethash.mine(dataset,header,easy_difficulty)) == dict
|
||||
assert type(pyethash.mine(dataset,header,easy_difficulty)["nonce"]) == long
|
||||
assert type(pyethash.mine(dataset,header,easy_difficulty)["mix digest"]) == str
|
||||
assert type(pyethash.mine(dataset,header,easy_difficulty)["result"]) == str
|
||||
|
||||
def test_mining_doesnt_always_return_the_same_value():
|
||||
easy_difficulty1 = int_to_bytes(int(2**256 * 0.999))
|
||||
# 1 in 1000 difficulty
|
||||
easy_difficulty2 = int_to_bytes(int(2**256 * 0.001))
|
||||
assert easy_difficulty1 != easy_difficulty2
|
||||
cache = pyethash.mkcache_bytes(
|
||||
1024,
|
||||
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
|
||||
full_size = 1024 * 32
|
||||
header = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
|
||||
dataset = pyethash.calc_dataset_bytes(full_size, cache)
|
||||
# Check type of outputs
|
||||
assert pyethash.mine(dataset, header, easy_difficulty1)['nonce'] != pyethash.mine(dataset, header, easy_difficulty2)['nonce']
|
||||
|
||||
def test_get_seedhash():
|
||||
assert pyethash.get_seedhash(0).encode('hex') == '0' * 64
|
||||
import hashlib, sha3
|
||||
expected = pyethash.get_seedhash(0)
|
||||
#print "checking seed hashes:",
|
||||
for i in range(0, 30000*2048, 30000):
|
||||
#print i // 30000,
|
||||
assert pyethash.get_seedhash(i) == expected
|
||||
expected = hashlib.sha3_256(expected).digest()
|
32
Godeps/_workspace/src/github.com/ethereum/ethash/test/test.sh
generated
vendored
@@ -1,32 +0,0 @@
#!/bin/bash

# Strict mode
set -e

SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    SOURCE="$(readlink "$SOURCE")"
    [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
TEST_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

echo -e "\n################# Testing JS ##################"
# TODO: Use mocha and real testing tools instead of rolling our own
cd $TEST_DIR/../js
if [ -x "$(which nodejs)" ] ; then
    nodejs test.js
fi
if [ -x "$(which node)" ] ; then
    node test.js
fi

echo -e "\n################# Testing C ##################"
$TEST_DIR/c/test.sh

# Temporarily commenting out python tests until they conform to the API
#echo -e "\n################# Testing Python ##################"
#$TEST_DIR/python/test.sh

echo "################# Testing Go ##################"
cd $TEST_DIR/.. && go test -timeout 9999s
53
Godeps/_workspace/src/github.com/fatih/color/color.go
generated
vendored
@ -6,8 +6,8 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/mattn/go-colorable"
|
||||
"github.com/mattn/go-isatty"
|
||||
"github.com/shiena/ansicolor"
|
||||
)
|
||||
|
||||
// NoColor defines if the output is colorized or not. It's dynamically set to
|
||||
@ -53,6 +53,18 @@ const (
|
||||
FgWhite
|
||||
)
|
||||
|
||||
// Foreground Hi-Intensity text colors
|
||||
const (
|
||||
FgHiBlack Attribute = iota + 90
|
||||
FgHiRed
|
||||
FgHiGreen
|
||||
FgHiYellow
|
||||
FgHiBlue
|
||||
FgHiMagenta
|
||||
FgHiCyan
|
||||
FgHiWhite
|
||||
)
|
||||
|
||||
// Background text colors
|
||||
const (
|
||||
BgBlack Attribute = iota + 40
|
||||
@ -65,6 +77,18 @@ const (
|
||||
BgWhite
|
||||
)
|
||||
|
||||
// Background Hi-Intensity text colors
|
||||
const (
|
||||
BgHiBlack Attribute = iota + 100
|
||||
BgHiRed
|
||||
BgHiGreen
|
||||
BgHiYellow
|
||||
BgHiBlue
|
||||
BgHiMagenta
|
||||
BgHiCyan
|
||||
BgHiWhite
|
||||
)
|
||||
|
||||
// New returns a newly created color object.
|
||||
func New(value ...Attribute) *Color {
|
||||
c := &Color{params: make([]Attribute, 0)}
|
||||
@ -123,7 +147,7 @@ func (c *Color) prepend(value Attribute) {
|
||||
|
||||
// Output defines the standard output of the print functions. By default
|
||||
// os.Stdout is used.
|
||||
var Output = ansicolor.NewAnsiColorWriter(os.Stdout)
|
||||
var Output = colorable.NewColorableStdout()
|
||||
|
||||
// Print formats using the default formats for its operands and writes to
|
||||
// standard output. Spaces are added between operands when neither is a
|
||||
@ -259,6 +283,31 @@ func (c *Color) isNoColorSet() bool {
|
||||
return NoColor
|
||||
}
|
||||
|
||||
// Equals returns a boolean value indicating whether two colors are equal.
|
||||
func (c *Color) Equals(c2 *Color) bool {
|
||||
if len(c.params) != len(c2.params) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, attr := range c.params {
|
||||
if !c2.attrExists(attr) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (c *Color) attrExists(a Attribute) bool {
|
||||
for _, attr := range c.params {
|
||||
if attr == a {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func boolPtr(v bool) *bool {
|
||||
return &v
|
||||
}
|
||||
|
176
Godeps/_workspace/src/github.com/fatih/color/color_test.go
generated
vendored
@ -1,176 +0,0 @@
|
||||
package color
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/shiena/ansicolor"
|
||||
)
|
||||
|
||||
// Testing colors is kinda different. First we test for given colors and their
|
||||
// escaped formatted results. Next we create some visual tests to be tested.
|
||||
// Each visual test includes the color name to be compared.
|
||||
func TestColor(t *testing.T) {
|
||||
rb := new(bytes.Buffer)
|
||||
Output = rb
|
||||
|
||||
testColors := []struct {
|
||||
text string
|
||||
code Attribute
|
||||
}{
|
||||
{text: "black", code: FgBlack},
|
||||
{text: "red", code: FgRed},
|
||||
{text: "green", code: FgGreen},
|
||||
{text: "yellow", code: FgYellow},
|
||||
{text: "blue", code: FgBlue},
|
||||
{text: "magent", code: FgMagenta},
|
||||
{text: "cyan", code: FgCyan},
|
||||
{text: "white", code: FgWhite},
|
||||
}
|
||||
|
||||
for _, c := range testColors {
|
||||
New(c.code).Print(c.text)
|
||||
|
||||
line, _ := rb.ReadString('\n')
|
||||
scannedLine := fmt.Sprintf("%q", line)
|
||||
colored := fmt.Sprintf("\x1b[%dm%s\x1b[0m", c.code, c.text)
|
||||
escapedForm := fmt.Sprintf("%q", colored)
|
||||
|
||||
fmt.Printf("%s\t: %s\n", c.text, line)
|
||||
|
||||
if scannedLine != escapedForm {
|
||||
t.Errorf("Expecting %s, got '%s'\n", escapedForm, scannedLine)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNoColor(t *testing.T) {
|
||||
rb := new(bytes.Buffer)
|
||||
Output = rb
|
||||
|
||||
testColors := []struct {
|
||||
text string
|
||||
code Attribute
|
||||
}{
|
||||
{text: "black", code: FgBlack},
|
||||
{text: "red", code: FgRed},
|
||||
{text: "green", code: FgGreen},
|
||||
{text: "yellow", code: FgYellow},
|
||||
{text: "blue", code: FgBlue},
|
||||
{text: "magent", code: FgMagenta},
|
||||
{text: "cyan", code: FgCyan},
|
||||
{text: "white", code: FgWhite},
|
||||
}
|
||||
|
||||
for _, c := range testColors {
|
||||
p := New(c.code)
|
||||
p.DisableColor()
|
||||
p.Print(c.text)
|
||||
|
||||
line, _ := rb.ReadString('\n')
|
||||
if line != c.text {
|
||||
t.Errorf("Expecting %s, got '%s'\n", c.text, line)
|
||||
}
|
||||
}
|
||||
|
||||
// global check
|
||||
NoColor = true
|
||||
defer func() {
|
||||
NoColor = false
|
||||
}()
|
||||
for _, c := range testColors {
|
||||
p := New(c.code)
|
||||
p.Print(c.text)
|
||||
|
||||
line, _ := rb.ReadString('\n')
|
||||
if line != c.text {
|
||||
t.Errorf("Expecting %s, got '%s'\n", c.text, line)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestColorVisual(t *testing.T) {
|
||||
// First Visual Test
|
||||
fmt.Println("")
|
||||
Output = ansicolor.NewAnsiColorWriter(os.Stdout)
|
||||
|
||||
New(FgRed).Printf("red\t")
|
||||
New(BgRed).Print(" ")
|
||||
New(FgRed, Bold).Println(" red")
|
||||
|
||||
New(FgGreen).Printf("green\t")
|
||||
New(BgGreen).Print(" ")
|
||||
New(FgGreen, Bold).Println(" green")
|
||||
|
||||
New(FgYellow).Printf("yellow\t")
|
||||
New(BgYellow).Print(" ")
|
||||
New(FgYellow, Bold).Println(" yellow")
|
||||
|
||||
New(FgBlue).Printf("blue\t")
|
||||
New(BgBlue).Print(" ")
|
||||
New(FgBlue, Bold).Println(" blue")
|
||||
|
||||
New(FgMagenta).Printf("magenta\t")
|
||||
New(BgMagenta).Print(" ")
|
||||
New(FgMagenta, Bold).Println(" magenta")
|
||||
|
||||
New(FgCyan).Printf("cyan\t")
|
||||
New(BgCyan).Print(" ")
|
||||
New(FgCyan, Bold).Println(" cyan")
|
||||
|
||||
New(FgWhite).Printf("white\t")
|
||||
New(BgWhite).Print(" ")
|
||||
New(FgWhite, Bold).Println(" white")
|
||||
fmt.Println("")
|
||||
|
||||
// Second Visual test
|
||||
Black("black")
|
||||
Red("red")
|
||||
Green("green")
|
||||
Yellow("yellow")
|
||||
Blue("blue")
|
||||
Magenta("magenta")
|
||||
Cyan("cyan")
|
||||
White("white")
|
||||
|
||||
// Third visual test
|
||||
fmt.Println()
|
||||
Set(FgBlue)
|
||||
fmt.Println("is this blue?")
|
||||
Unset()
|
||||
|
||||
Set(FgMagenta)
|
||||
fmt.Println("and this magenta?")
|
||||
Unset()
|
||||
|
||||
// Fourth Visual test
|
||||
fmt.Println()
|
||||
blue := New(FgBlue).PrintlnFunc()
|
||||
blue("blue text with custom print func")
|
||||
|
||||
red := New(FgRed).PrintfFunc()
|
||||
red("red text with a printf func: %d\n", 123)
|
||||
|
||||
put := New(FgYellow).SprintFunc()
|
||||
warn := New(FgRed).SprintFunc()
|
||||
|
||||
fmt.Fprintf(Output, "this is a %s and this is %s.\n", put("warning"), warn("error"))
|
||||
|
||||
info := New(FgWhite, BgGreen).SprintFunc()
|
||||
fmt.Fprintf(Output, "this %s rocks!\n", info("package"))
|
||||
|
||||
// Fifth Visual Test
|
||||
fmt.Println()
|
||||
|
||||
fmt.Fprintln(Output, BlackString("black"))
|
||||
fmt.Fprintln(Output, RedString("red"))
|
||||
fmt.Fprintln(Output, GreenString("green"))
|
||||
fmt.Fprintln(Output, YellowString("yellow"))
|
||||
fmt.Fprintln(Output, BlueString("blue"))
|
||||
fmt.Fprintln(Output, MagentaString("magenta"))
|
||||
fmt.Fprintln(Output, CyanString("cyan"))
|
||||
fmt.Fprintln(Output, WhiteString("white"))
|
||||
}
|
1
Godeps/_workspace/src/github.com/gizak/termui/.gitignore
generated
vendored
@@ -22,3 +22,4 @@ _testmain.go
*.exe
*.test
*.prof
.DS_Store
207
Godeps/_workspace/src/github.com/gizak/termui/README.md
generated
vendored
@ -1,23 +1,72 @@
|
||||
# termui [](https://travis-ci.org/gizak/termui) [](https://godoc.org/github.com/gizak/termui)
|
||||
|
||||
## Update 23/06/2015
|
||||
Pull requests and master branch are freezing, waiting for merging from `refactoring` branch.
|
||||
<img src="./_example/dashboard.gif" alt="demo cast under osx 10.10; Terminal.app; Menlo Regular 12pt.)" width="80%">
|
||||
|
||||
## Notice
|
||||
termui comes with ABSOLUTELY NO WARRANTY, and there is a breaking change coming up (see refactoring branch) which will change the `Bufferer` interface and many others. These changes reduce calculation overhead and introduce a new drawing buffer with better capabilities. We will step into the next stage (call it beta) after merging these changes.
|
||||
`termui` is a cross-platform, easy-to-compile, and fully-customizable terminal dashboard. It is inspired by [blessed-contrib](https://github.com/yaronn/blessed-contrib), but purely in Go.
|
||||
|
||||
## Introduction
|
||||
Go terminal dashboard. Inspired by [blessed-contrib](https://github.com/yaronn/blessed-contrib), but purely in Go.
|
||||
Now version v2 has arrived! It brings a new event system, a new theme system, a new `Buffer` interface and specific colour text rendering. (Some docs are missing, but they will be completed soon!)
|
||||
|
||||
Cross-platform, easy to compile, and fully-customizable.
|
||||
## Installation
|
||||
|
||||
__Demo:__ (cast under osx 10.10; Terminal.app; Menlo Regular 12pt.)
|
||||
`master` mirrors v2 branch, to install:
|
||||
|
||||
<img src="./example/dashboard.gif" alt="demo" width="600">
|
||||
go get -u github.com/gizak/termui
|
||||
|
||||
For compatibility reasons, you can choose to install the legacy version of `termui`:
|
||||
|
||||
go get gopkg.in/gizak/termui.v1
|
||||
|
||||
## Usage
|
||||
|
||||
### Layout
|
||||
|
||||
To use `termui`, the very first thing you may want to know is how to manage layout. `termui` offers two ways of doing this, known as absolute layout and grid layout.
|
||||
|
||||
__Absolute layout__
|
||||
|
||||
Each widget has an underlying block structure, which is basically a box model. It has border, label and padding properties. A widget's border can be hidden or displayed (with its border label), and you can pick different front/back colours for the border as well. To display such a widget at a specific location in the terminal window, you need to assign `.X`, `.Y`, `.Height` and `.Width` values to each widget before sending it to `.Render`. Let's demonstrate this with a code snippet:
|
||||
|
||||
`````go
|
||||
import ui "github.com/gizak/termui" // <- ui shortcut, optional
|
||||
|
||||
func main() {
|
||||
err := ui.Init()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer ui.Close()
|
||||
|
||||
p := ui.NewPar(":PRESS q TO QUIT DEMO")
|
||||
p.Height = 3
|
||||
p.Width = 50
|
||||
p.TextFgColor = ui.ColorWhite
|
||||
p.BorderLabel = "Text Box"
|
||||
p.BorderFg = ui.ColorCyan
|
||||
|
||||
g := ui.NewGauge()
|
||||
g.Percent = 50
|
||||
g.Width = 50
|
||||
g.Height = 3
|
||||
g.Y = 11
|
||||
g.BorderLabel = "Gauge"
|
||||
g.BarColor = ui.ColorRed
|
||||
g.BorderFg = ui.ColorWhite
|
||||
g.BorderLabelFg = ui.ColorCyan
|
||||
|
||||
ui.Render(p, g) // feel free to call Render, it's async and non-block
|
||||
|
||||
// event handler...
|
||||
}
|
||||
`````
|
||||
|
||||
Note that components can be overlapped (I'd rather call this a feature...), `Render(rs ...Renderer)` renders its args from left to right (i.e. each component's weight is arising from left to right).
|
||||
|
||||
__Grid layout:__
|
||||
|
||||
Expressive syntax, using [12 columns grid system](http://www.w3schools.com/bootstrap/bootstrap_grid_system.asp)
|
||||
<img src="./_example/grid.gif" alt="grid" width="60%">
|
||||
|
||||
Grid layout uses a [12-column grid system](http://www.w3schools.com/bootstrap/bootstrap_grid_system.asp) with an expressive syntax. To use `Grid`, all we need to do is build a widget tree consisting of `Row`s and `Col`s (actually a `Col` is also a `Row`, but with a widget endpoint attached).
|
||||
|
||||
```go
|
||||
import ui "github.com/gizak/termui"
|
||||
// init and create widgets...
|
||||
@ -37,111 +86,49 @@ Expressive syntax, using [12 columns grid system](http://www.w3schools.com/boots
|
||||
|
||||
ui.Render(ui.Body)
|
||||
```
|
||||
[demo code:](https://github.com/gizak/termui/blob/master/example/grid.go)
|
||||
|
||||
<img src="./example/grid.gif" alt="grid" width="500">
|
||||
### Events
|
||||
|
||||
## Installation
|
||||
|
||||
go get github.com/gizak/termui
|
||||
|
||||
## Usage
|
||||
|
||||
Each component's layout is a bit like HTML block (box model), which has border and padding.
|
||||
|
||||
The `Border` property can be chosen to hide or display (with its border label), when it comes to display, the label takes 1 padding space (i.e. in css: `padding: 1;`, innerHeight and innerWidth therefore shrunk by 1).
|
||||
|
||||
`````go
|
||||
import ui "github.com/gizak/termui" // <- ui shortcut, optional
|
||||
|
||||
func main() {
|
||||
err := ui.Init()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer ui.Close()
|
||||
|
||||
p := ui.NewPar(":PRESS q TO QUIT DEMO")
|
||||
p.Height = 3
|
||||
p.Width = 50
|
||||
p.TextFgColor = ui.ColorWhite
|
||||
p.Border.Label = "Text Box"
|
||||
p.Border.FgColor = ui.ColorCyan
|
||||
|
||||
g := ui.NewGauge()
|
||||
g.Percent = 50
|
||||
g.Width = 50
|
||||
g.Height = 3
|
||||
g.Y = 11
|
||||
g.Border.Label = "Gauge"
|
||||
g.BarColor = ui.ColorRed
|
||||
g.Border.FgColor = ui.ColorWhite
|
||||
g.Border.LabelFgColor = ui.ColorCyan
|
||||
|
||||
ui.Render(p, g)
|
||||
|
||||
// event handler...
|
||||
}
|
||||
`````
|
||||
|
||||
Note that components can be overlapped (I'd rather call this a feature...), `Render(rs ...Renderer)` renders its args from left to right (i.e. each component's weight is arising from left to right).
|
||||
|
||||
## Themes
|
||||
|
||||
_All_ colors in _all_ components can be changed at _any_ time, while there provides some predefined color schemes:
|
||||
`termui` ships with an http-like event mux handling system. All events are channeled up from different sources (typing, click, window resize, custom events) and then encoded as a universal `Event` object. `Event.Path` indicates the event type and `Event.Data` stores the event data struct. Adding a handler for a certain event is as easy as shown below:
|
||||
|
||||
```go
|
||||
// for now there are only two themes: default and helloworld
|
||||
termui.UseTheme("helloworld")
|
||||
// handle key q pressing
|
||||
ui.Handle("/sys/kbd/q", func(ui.Event) {
|
||||
// press q to quit
|
||||
ui.StopLoop()
|
||||
})
|
||||
|
||||
// create components...
|
||||
ui.Handle("/sys/kbd/C-x", func(ui.Event) {
|
||||
// handle Ctrl + x combination
|
||||
})
|
||||
|
||||
ui.Handle("/sys/kbd", func(ui.Event) {
|
||||
// handle all other key pressing
|
||||
})
|
||||
|
||||
// handle a 1s timer
|
||||
ui.Handle("/timer/1s", func(e ui.Event) {
|
||||
t := e.Data.(ui.EvtTimer)
|
||||
// t is an EvtTimer
|
||||
if t.Count%2 == 0 {
|
||||
// do something
|
||||
}
|
||||
})
|
||||
|
||||
ui.Loop() // block until StopLoop is called
|
||||
```
|
||||
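Custom event sources plug into the same mux. The sketch below leans on the exported `Merge`, `Handle` and `Loop` helpers from the new event code in this change set; the `/usr/tick` path, the integer payload and the one-second producer goroutine are invented for illustration (it assumes the `ui` alias plus the standard `time` package, and that `ui.Init()` has already been called as in the examples above).

```go
// an illustrative custom event source: every second it pushes an event
// carrying a counter, addressed to the made-up path "/usr/tick"
custom := make(chan ui.Event)
ui.Merge("custom", custom)

ui.Handle("/usr/tick", func(e ui.Event) {
	count := e.Data.(int)
	// update widgets with count and re-render here...
	_ = count
})

go func() {
	for i := 0; ; i++ {
		time.Sleep(time.Second)
		custom <- ui.Event{Path: "/usr/tick", Data: i, Time: time.Now().Unix()}
	}
}()

ui.Loop() // block until StopLoop is called
```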
The `default` theme's settings depend on the user's terminal color scheme; that is, widgets are drawn with your terminal's default font and background colors, so it will look something like:
|
||||
|
||||
<img src="./example/themedefault.png" alt="default" type="image/png" width="600">
|
||||
### Widgets
|
||||
|
||||
The `helloworld` color scheme drops in some colors!
|
||||
|
||||
<img src="./example/themehelloworld.png" alt="helloworld" type="image/png" width="600">
|
||||
|
||||
## Widgets
|
||||
|
||||
#### Par
|
||||
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/par.go)
|
||||
|
||||
<img src="./example/par.png" alt="par" type="image/png" width="300">
|
||||
|
||||
#### List
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/list.go)
|
||||
|
||||
<img src="./example/list.png" alt="list" type="image/png" width="200">
|
||||
|
||||
#### Gauge
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/gauge.go)
|
||||
|
||||
<img src="./example/gauge.png" alt="gauge" type="image/png" width="350">
|
||||
|
||||
#### Line Chart
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/linechart.go)
|
||||
|
||||
<img src="./example/linechart.png" alt="linechart" type="image/png" width="450">
|
||||
|
||||
#### Bar Chart
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/barchart.go)
|
||||
|
||||
<img src="./example/barchart.png" alt="barchart" type="image/png" width="150">
|
||||
|
||||
#### Multi-Bar / Stacked-Bar Chart
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/mbarchart.go)
|
||||
|
||||
<img src="./example/mbarchart.png" alt="barchart" type="image/png" width="150">
|
||||
|
||||
#### Sparklines
|
||||
[demo code](https://github.com/gizak/termui/blob/master/example/sparklines.go)
|
||||
|
||||
<img src="./example/sparklines.png" alt="sparklines" type="image/png" width="350">
|
||||
Click an image to see the corresponding demo code.
|
||||
|
||||
[<img src="./_example/par.png" alt="par" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/par.go)
|
||||
[<img src="./_example/list.png" alt="list" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/list.go)
|
||||
[<img src="./_example/gauge.png" alt="gauge" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/gauge.go)
|
||||
[<img src="./_example/linechart.png" alt="linechart" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/linechart.go)
|
||||
[<img src="./_example/barchart.png" alt="barchart" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/barchart.go)
|
||||
[<img src="./_example/mbarchart.png" alt="barchart" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/mbarchart.go)
|
||||
[<img src="./_example/sparklines.png" alt="sparklines" type="image/png" width="45%">](https://github.com/gizak/termui/blob/master/_example/sparklines.go)
|
||||
|
||||
## GoDoc
|
||||
|
||||
@ -150,10 +137,12 @@ The `helloworld` color scheme drops in some colors!
|
||||
## TODO
|
||||
|
||||
- [x] Grid layout
|
||||
- [ ] Event system
|
||||
- [ ] Canvas widget
|
||||
- [ ] Refine APIs
|
||||
- [x] Event system
|
||||
- [x] Canvas widget
|
||||
- [x] Refine APIs
|
||||
- [ ] Focusable widgets
|
||||
|
||||
## Changelog
|
||||
|
||||
## License
|
||||
This library is released under the [MIT License](http://opensource.org/licenses/MIT).
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
@ -39,16 +39,16 @@ type BarChart struct {
|
||||
// NewBarChart returns a new *BarChart with current theme.
|
||||
func NewBarChart() *BarChart {
|
||||
bc := &BarChart{Block: *NewBlock()}
|
||||
bc.BarColor = theme.BarChartBar
|
||||
bc.NumColor = theme.BarChartNum
|
||||
bc.TextColor = theme.BarChartText
|
||||
bc.BarColor = ThemeAttr("barchart.bar.bg")
|
||||
bc.NumColor = ThemeAttr("barchart.num.fg")
|
||||
bc.TextColor = ThemeAttr("barchart.text.fg")
|
||||
bc.BarGap = 1
|
||||
bc.BarWidth = 3
|
||||
return bc
|
||||
}
|
||||
|
||||
func (bc *BarChart) layout() {
|
||||
bc.numBar = bc.innerWidth / (bc.BarGap + bc.BarWidth)
|
||||
bc.numBar = bc.innerArea.Dx() / (bc.BarGap + bc.BarWidth)
|
||||
bc.labels = make([][]rune, bc.numBar)
|
||||
bc.dataNum = make([][]rune, len(bc.Data))
|
||||
|
||||
@ -69,7 +69,7 @@ func (bc *BarChart) layout() {
|
||||
bc.max = bc.Data[i]
|
||||
}
|
||||
}
|
||||
bc.scale = float64(bc.max) / float64(bc.innerHeight-1)
|
||||
bc.scale = float64(bc.max) / float64(bc.innerArea.Dy()-1)
|
||||
}
|
||||
|
||||
func (bc *BarChart) SetMax(max int) {
|
||||
@ -80,8 +80,8 @@ func (bc *BarChart) SetMax(max int) {
|
||||
}
|
||||
|
||||
// Buffer implements Bufferer interface.
|
||||
func (bc *BarChart) Buffer() []Point {
|
||||
ps := bc.Block.Buffer()
|
||||
func (bc *BarChart) Buffer() Buffer {
|
||||
buf := bc.Block.Buffer()
|
||||
bc.layout()
|
||||
|
||||
for i := 0; i < bc.numBar && i < len(bc.Data) && i < len(bc.DataLabels); i++ {
|
||||
@ -90,46 +90,49 @@ func (bc *BarChart) Buffer() []Point {
|
||||
// plot bar
|
||||
for j := 0; j < bc.BarWidth; j++ {
|
||||
for k := 0; k < h; k++ {
|
||||
p := Point{}
|
||||
p.Ch = ' '
|
||||
p.Bg = bc.BarColor
|
||||
if bc.BarColor == ColorDefault { // when color is default, space char treated as transparent!
|
||||
p.Bg |= AttrReverse
|
||||
c := Cell{
|
||||
Ch: ' ',
|
||||
Bg: bc.BarColor,
|
||||
}
|
||||
p.X = bc.innerX + i*(bc.BarWidth+bc.BarGap) + j
|
||||
p.Y = bc.innerY + bc.innerHeight - 2 - k
|
||||
ps = append(ps, p)
|
||||
if bc.BarColor == ColorDefault { // when color is default, space char treated as transparent!
|
||||
c.Bg |= AttrReverse
|
||||
}
|
||||
x := bc.innerArea.Min.X + i*(bc.BarWidth+bc.BarGap) + j
|
||||
y := bc.innerArea.Min.Y + bc.innerArea.Dy() - 2 - k
|
||||
buf.Set(x, y, c)
|
||||
}
|
||||
}
|
||||
// plot text
|
||||
for j, k := 0, 0; j < len(bc.labels[i]); j++ {
|
||||
w := charWidth(bc.labels[i][j])
|
||||
p := Point{}
|
||||
p.Ch = bc.labels[i][j]
|
||||
p.Bg = bc.BgColor
|
||||
p.Fg = bc.TextColor
|
||||
p.Y = bc.innerY + bc.innerHeight - 1
|
||||
p.X = bc.innerX + oftX + k
|
||||
ps = append(ps, p)
|
||||
c := Cell{
|
||||
Ch: bc.labels[i][j],
|
||||
Bg: bc.Bg,
|
||||
Fg: bc.TextColor,
|
||||
}
|
||||
y := bc.innerArea.Min.Y + bc.innerArea.Dy() - 1
|
||||
x := bc.innerArea.Min.X + oftX + k
|
||||
buf.Set(x, y, c)
|
||||
k += w
|
||||
}
|
||||
// plot num
|
||||
for j := 0; j < len(bc.dataNum[i]); j++ {
|
||||
p := Point{}
|
||||
p.Ch = bc.dataNum[i][j]
|
||||
p.Fg = bc.NumColor
|
||||
p.Bg = bc.BarColor
|
||||
c := Cell{
|
||||
Ch: bc.dataNum[i][j],
|
||||
Fg: bc.NumColor,
|
||||
Bg: bc.BarColor,
|
||||
}
|
||||
if bc.BarColor == ColorDefault { // the same as above
|
||||
p.Bg |= AttrReverse
|
||||
c.Bg |= AttrReverse
|
||||
}
|
||||
if h == 0 {
|
||||
p.Bg = bc.BgColor
|
||||
c.Bg = bc.Bg
|
||||
}
|
||||
p.X = bc.innerX + oftX + (bc.BarWidth-len(bc.dataNum[i]))/2 + j
|
||||
p.Y = bc.innerY + bc.innerHeight - 2
|
||||
ps = append(ps, p)
|
||||
x := bc.innerArea.Min.X + oftX + (bc.BarWidth-len(bc.dataNum[i]))/2 + j
|
||||
y := bc.innerArea.Min.Y + bc.innerArea.Dy() - 2
|
||||
buf.Set(x, y, c)
|
||||
}
|
||||
}
|
||||
|
||||
return bc.Block.chopOverflow(ps)
|
||||
return buf
|
||||
}
|
276
Godeps/_workspace/src/github.com/gizak/termui/block.go
generated
vendored
@ -1,142 +1,240 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
package termui
|
||||
|
||||
import "image"
|
||||
|
||||
// Hline is a horizontal line.
|
||||
type Hline struct {
|
||||
X int
|
||||
Y int
|
||||
Len int
|
||||
Fg Attribute
|
||||
Bg Attribute
|
||||
}
|
||||
|
||||
// Vline is a vertical line.
|
||||
type Vline struct {
|
||||
X int
|
||||
Y int
|
||||
Len int
|
||||
Fg Attribute
|
||||
Bg Attribute
|
||||
}
|
||||
|
||||
// Buffer draws a horizontal line.
|
||||
func (l Hline) Buffer() Buffer {
|
||||
if l.Len <= 0 {
|
||||
return NewBuffer()
|
||||
}
|
||||
return NewFilledBuffer(l.X, l.Y, l.X+l.Len, l.Y+1, HORIZONTAL_LINE, l.Fg, l.Bg)
|
||||
}
|
||||
|
||||
// Buffer draws a vertical line.
|
||||
func (l Vline) Buffer() Buffer {
|
||||
if l.Len <= 0 {
|
||||
return NewBuffer()
|
||||
}
|
||||
return NewFilledBuffer(l.X, l.Y, l.X+1, l.Y+l.Len, VERTICAL_LINE, l.Fg, l.Bg)
|
||||
}
|
||||
|
||||
// Buffer draws a box border.
|
||||
func (b Block) drawBorder(buf Buffer) {
|
||||
if !b.Border {
|
||||
return
|
||||
}
|
||||
|
||||
min := b.area.Min
|
||||
max := b.area.Max
|
||||
|
||||
x0 := min.X
|
||||
y0 := min.Y
|
||||
x1 := max.X - 1
|
||||
y1 := max.Y - 1
|
||||
|
||||
// draw lines
|
||||
if b.BorderTop {
|
||||
buf.Merge(Hline{x0, y0, x1 - x0, b.BorderFg, b.BorderBg}.Buffer())
|
||||
}
|
||||
if b.BorderBottom {
|
||||
buf.Merge(Hline{x0, y1, x1 - x0, b.BorderFg, b.BorderBg}.Buffer())
|
||||
}
|
||||
if b.BorderLeft {
|
||||
buf.Merge(Vline{x0, y0, y1 - y0, b.BorderFg, b.BorderBg}.Buffer())
|
||||
}
|
||||
if b.BorderRight {
|
||||
buf.Merge(Vline{x1, y0, y1 - y0, b.BorderFg, b.BorderBg}.Buffer())
|
||||
}
|
||||
|
||||
// draw corners
|
||||
if b.BorderTop && b.BorderLeft && b.area.Dx() > 0 && b.area.Dy() > 0 {
|
||||
buf.Set(x0, y0, Cell{TOP_LEFT, b.BorderFg, b.BorderBg})
|
||||
}
|
||||
if b.BorderTop && b.BorderRight && b.area.Dx() > 1 && b.area.Dy() > 0 {
|
||||
buf.Set(x1, y0, Cell{TOP_RIGHT, b.BorderFg, b.BorderBg})
|
||||
}
|
||||
if b.BorderBottom && b.BorderLeft && b.area.Dx() > 0 && b.area.Dy() > 1 {
|
||||
buf.Set(x0, y1, Cell{BOTTOM_LEFT, b.BorderFg, b.BorderBg})
|
||||
}
|
||||
if b.BorderBottom && b.BorderRight && b.area.Dx() > 1 && b.area.Dy() > 1 {
|
||||
buf.Set(x1, y1, Cell{BOTTOM_RIGHT, b.BorderFg, b.BorderBg})
|
||||
}
|
||||
}
|
||||
|
||||
func (b Block) drawBorderLabel(buf Buffer) {
|
||||
maxTxtW := b.area.Dx() - 2
|
||||
tx := DTrimTxCls(DefaultTxBuilder.Build(b.BorderLabel, b.BorderLabelFg, b.BorderLabelBg), maxTxtW)
|
||||
|
||||
for i, w := 0, 0; i < len(tx); i++ {
|
||||
buf.Set(b.area.Min.X+1+w, b.area.Min.Y, tx[i])
|
||||
w += tx[i].Width()
|
||||
}
|
||||
}
|
||||
|
||||
// Block is a base struct for all other upper level widgets,
|
||||
// consider it as css: display:block.
|
||||
// Normally you do not need to create it manually.
|
||||
type Block struct {
|
||||
area image.Rectangle
|
||||
innerArea image.Rectangle
|
||||
X int
|
||||
Y int
|
||||
Border labeledBorder
|
||||
IsDisplay bool
|
||||
HasBorder bool
|
||||
BgColor Attribute
|
||||
Border bool
|
||||
BorderFg Attribute
|
||||
BorderBg Attribute
|
||||
BorderLeft bool
|
||||
BorderRight bool
|
||||
BorderTop bool
|
||||
BorderBottom bool
|
||||
BorderLabel string
|
||||
BorderLabelFg Attribute
|
||||
BorderLabelBg Attribute
|
||||
Display bool
|
||||
Bg Attribute
|
||||
Width int
|
||||
Height int
|
||||
innerWidth int
|
||||
innerHeight int
|
||||
innerX int
|
||||
innerY int
|
||||
PaddingTop int
|
||||
PaddingBottom int
|
||||
PaddingLeft int
|
||||
PaddingRight int
|
||||
id string
|
||||
Float Align
|
||||
}
|
||||
|
||||
// NewBlock returns a *Block which inherits styles from current theme.
|
||||
func NewBlock() *Block {
|
||||
d := Block{}
|
||||
d.IsDisplay = true
|
||||
d.HasBorder = theme.HasBorder
|
||||
d.Border.BgColor = theme.BorderBg
|
||||
d.Border.FgColor = theme.BorderFg
|
||||
d.Border.LabelBgColor = theme.BorderLabelTextBg
|
||||
d.Border.LabelFgColor = theme.BorderLabelTextFg
|
||||
d.BgColor = theme.BlockBg
|
||||
d.Width = 2
|
||||
d.Height = 2
|
||||
return &d
|
||||
b := Block{}
|
||||
b.Display = true
|
||||
b.Border = true
|
||||
b.BorderLeft = true
|
||||
b.BorderRight = true
|
||||
b.BorderTop = true
|
||||
b.BorderBottom = true
|
||||
b.BorderBg = ThemeAttr("border.bg")
|
||||
b.BorderFg = ThemeAttr("border.fg")
|
||||
b.BorderLabelBg = ThemeAttr("label.bg")
|
||||
b.BorderLabelFg = ThemeAttr("label.fg")
|
||||
b.Bg = ThemeAttr("block.bg")
|
||||
b.Width = 2
|
||||
b.Height = 2
|
||||
b.id = GenId()
|
||||
b.Float = AlignNone
|
||||
return &b
|
||||
}
|
||||
|
||||
// compute box model
|
||||
func (d *Block) align() {
|
||||
d.innerWidth = d.Width - d.PaddingLeft - d.PaddingRight
|
||||
d.innerHeight = d.Height - d.PaddingTop - d.PaddingBottom
|
||||
d.innerX = d.X + d.PaddingLeft
|
||||
d.innerY = d.Y + d.PaddingTop
|
||||
func (b Block) Id() string {
|
||||
return b.id
|
||||
}
|
||||
|
||||
if d.HasBorder {
|
||||
d.innerHeight -= 2
|
||||
d.innerWidth -= 2
|
||||
d.Border.X = d.X
|
||||
d.Border.Y = d.Y
|
||||
d.Border.Width = d.Width
|
||||
d.Border.Height = d.Height
|
||||
d.innerX++
|
||||
d.innerY++
|
||||
}
|
||||
// Align computes box model
|
||||
func (b *Block) Align() {
|
||||
// outer
|
||||
b.area.Min.X = 0
|
||||
b.area.Min.Y = 0
|
||||
b.area.Max.X = b.Width
|
||||
b.area.Max.Y = b.Height
|
||||
|
||||
if d.innerHeight < 0 {
|
||||
d.innerHeight = 0
|
||||
}
|
||||
if d.innerWidth < 0 {
|
||||
d.innerWidth = 0
|
||||
}
|
||||
// float
|
||||
b.area = AlignArea(TermRect(), b.area, b.Float)
|
||||
b.area = MoveArea(b.area, b.X, b.Y)
|
||||
|
||||
// inner
|
||||
b.innerArea.Min.X = b.area.Min.X + b.PaddingLeft
|
||||
b.innerArea.Min.Y = b.area.Min.Y + b.PaddingTop
|
||||
b.innerArea.Max.X = b.area.Max.X - b.PaddingRight
|
||||
b.innerArea.Max.Y = b.area.Max.Y - b.PaddingBottom
|
||||
|
||||
if b.Border {
|
||||
if b.BorderLeft {
|
||||
b.innerArea.Min.X++
|
||||
}
|
||||
if b.BorderRight {
|
||||
b.innerArea.Max.X--
|
||||
}
|
||||
if b.BorderTop {
|
||||
b.innerArea.Min.Y++
|
||||
}
|
||||
if b.BorderBottom {
|
||||
b.innerArea.Max.Y--
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// InnerBounds returns the internal bounds of the block after aligning and
|
||||
// calculating the padding and border, if any.
|
||||
func (d *Block) InnerBounds() (x, y, width, height int) {
|
||||
d.align()
|
||||
return d.innerX, d.innerY, d.innerWidth, d.innerHeight
|
||||
func (b *Block) InnerBounds() image.Rectangle {
|
||||
b.Align()
|
||||
return b.innerArea
|
||||
}
|
||||
|
||||
// Buffer implements Bufferer interface.
|
||||
// Draw background and border (if any).
|
||||
func (d *Block) Buffer() []Point {
|
||||
d.align()
|
||||
func (b *Block) Buffer() Buffer {
|
||||
b.Align()
|
||||
|
||||
ps := []Point{}
|
||||
if !d.IsDisplay {
|
||||
return ps
|
||||
}
|
||||
buf := NewBuffer()
|
||||
buf.SetArea(b.area)
|
||||
buf.Fill(' ', ColorDefault, b.Bg)
|
||||
|
||||
if d.HasBorder {
|
||||
ps = d.Border.Buffer()
|
||||
}
|
||||
b.drawBorder(buf)
|
||||
b.drawBorderLabel(buf)
|
||||
|
||||
for i := 0; i < d.innerWidth; i++ {
|
||||
for j := 0; j < d.innerHeight; j++ {
|
||||
p := Point{}
|
||||
p.X = d.X + 1 + i
|
||||
p.Y = d.Y + 1 + j
|
||||
p.Ch = ' '
|
||||
p.Bg = d.BgColor
|
||||
ps = append(ps, p)
|
||||
}
|
||||
}
|
||||
return ps
|
||||
return buf
|
||||
}
|
||||
|
||||
// GetHeight implements GridBufferer.
|
||||
// It returns current height of the block.
|
||||
func (d Block) GetHeight() int {
|
||||
return d.Height
|
||||
func (b Block) GetHeight() int {
|
||||
return b.Height
|
||||
}
|
||||
|
||||
// SetX implements GridBufferer interface, which sets block's x position.
|
||||
func (d *Block) SetX(x int) {
|
||||
d.X = x
|
||||
func (b *Block) SetX(x int) {
|
||||
b.X = x
|
||||
}
|
||||
|
||||
// SetY implements GridBufferer interface, it sets y position for block.
|
||||
func (d *Block) SetY(y int) {
|
||||
d.Y = y
|
||||
func (b *Block) SetY(y int) {
|
||||
b.Y = y
|
||||
}
|
||||
|
||||
// SetWidth implements GridBuffer interface, it sets block's width.
|
||||
func (d *Block) SetWidth(w int) {
|
||||
d.Width = w
|
||||
func (b *Block) SetWidth(w int) {
|
||||
b.Width = w
|
||||
}
|
||||
|
||||
// chop the overflow parts
|
||||
func (d *Block) chopOverflow(ps []Point) []Point {
|
||||
nps := make([]Point, 0, len(ps))
|
||||
x := d.X
|
||||
y := d.Y
|
||||
w := d.Width
|
||||
h := d.Height
|
||||
for _, v := range ps {
|
||||
if v.X >= x &&
|
||||
v.X < x+w &&
|
||||
v.Y >= y &&
|
||||
v.Y < y+h {
|
||||
nps = append(nps, v)
|
||||
}
|
||||
}
|
||||
return nps
|
||||
func (b Block) InnerWidth() int {
|
||||
return b.innerArea.Dx()
|
||||
}
|
||||
|
||||
func (b Block) InnerHeight() int {
|
||||
return b.innerArea.Dy()
|
||||
}
|
||||
|
||||
func (b Block) InnerX() int {
|
||||
return b.innerArea.Min.X
|
||||
}
|
||||
|
||||
func (b Block) InnerY() int { return b.innerArea.Min.Y }
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
@ -12,3 +12,9 @@ const HORIZONTAL_LINE = '─'
|
||||
const TOP_LEFT = '┌'
|
||||
const BOTTOM_RIGHT = '┘'
|
||||
const BOTTOM_LEFT = '└'
|
||||
const VERTICAL_LEFT = '┤'
|
||||
const VERTICAL_RIGHT = '├'
|
||||
const HORIZONTAL_DOWN = '┬'
|
||||
const HORIZONTAL_UP = '┴'
|
||||
const QUOTA_LEFT = '«'
|
||||
const QUOTA_RIGHT = '»'
|
46
Godeps/_workspace/src/github.com/gizak/termui/block_test.go
generated
vendored
@ -1,46 +0,0 @@
|
||||
package termui
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestBlock_InnerBounds(t *testing.T) {
|
||||
b := NewBlock()
|
||||
b.X = 10
|
||||
b.Y = 11
|
||||
b.Width = 12
|
||||
b.Height = 13
|
||||
|
||||
assert := func(name string, x, y, w, h int) {
|
||||
t.Log(name)
|
||||
cx, cy, cw, ch := b.InnerBounds()
|
||||
if cx != x {
|
||||
t.Errorf("expected x to be %d but got %d", x, cx)
|
||||
}
|
||||
if cy != y {
|
||||
t.Errorf("expected y to be %d but got %d", y, cy)
|
||||
}
|
||||
if cw != w {
|
||||
t.Errorf("expected width to be %d but got %d", w, cw)
|
||||
}
|
||||
if ch != h {
|
||||
t.Errorf("expected height to be %d but got %d", h, ch)
|
||||
}
|
||||
}
|
||||
|
||||
b.HasBorder = false
|
||||
assert("no border, no padding", 10, 11, 12, 13)
|
||||
|
||||
b.HasBorder = true
|
||||
assert("border, no padding", 11, 12, 10, 11)
|
||||
|
||||
b.PaddingBottom = 2
|
||||
assert("border, 2b padding", 11, 12, 10, 9)
|
||||
|
||||
b.PaddingTop = 3
|
||||
assert("border, 2b 3t padding", 11, 15, 10, 6)
|
||||
|
||||
b.PaddingLeft = 4
|
||||
assert("border, 2b 3t 4l padding", 15, 15, 6, 6)
|
||||
|
||||
b.PaddingRight = 5
|
||||
assert("border, 2b 3t 4l 5r padding", 15, 15, 1, 6)
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
117
Godeps/_workspace/src/github.com/gizak/termui/box.go
generated
vendored
@ -1,117 +0,0 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
package termui
|
||||
|
||||
type border struct {
|
||||
X int
|
||||
Y int
|
||||
Width int
|
||||
Height int
|
||||
FgColor Attribute
|
||||
BgColor Attribute
|
||||
}
|
||||
|
||||
type hline struct {
|
||||
X int
|
||||
Y int
|
||||
Length int
|
||||
FgColor Attribute
|
||||
BgColor Attribute
|
||||
}
|
||||
|
||||
type vline struct {
|
||||
X int
|
||||
Y int
|
||||
Length int
|
||||
FgColor Attribute
|
||||
BgColor Attribute
|
||||
}
|
||||
|
||||
// Draw a horizontal line.
|
||||
func (l hline) Buffer() []Point {
|
||||
pts := make([]Point, l.Length)
|
||||
for i := 0; i < l.Length; i++ {
|
||||
pts[i].X = l.X + i
|
||||
pts[i].Y = l.Y
|
||||
pts[i].Ch = HORIZONTAL_LINE
|
||||
pts[i].Bg = l.BgColor
|
||||
pts[i].Fg = l.FgColor
|
||||
}
|
||||
return pts
|
||||
}
|
||||
|
||||
// Draw a vertical line.
|
||||
func (l vline) Buffer() []Point {
|
||||
pts := make([]Point, l.Length)
|
||||
for i := 0; i < l.Length; i++ {
|
||||
pts[i].X = l.X
|
||||
pts[i].Y = l.Y + i
|
||||
pts[i].Ch = VERTICAL_LINE
|
||||
pts[i].Bg = l.BgColor
|
||||
pts[i].Fg = l.FgColor
|
||||
}
|
||||
return pts
|
||||
}
|
||||
|
||||
// Draw a box border.
|
||||
func (b border) Buffer() []Point {
|
||||
if b.Width < 2 || b.Height < 2 {
|
||||
return nil
|
||||
}
|
||||
pts := make([]Point, 2*b.Width+2*b.Height-4)
|
||||
|
||||
pts[0].X = b.X
|
||||
pts[0].Y = b.Y
|
||||
pts[0].Fg = b.FgColor
|
||||
pts[0].Bg = b.BgColor
|
||||
pts[0].Ch = TOP_LEFT
|
||||
|
||||
pts[1].X = b.X + b.Width - 1
|
||||
pts[1].Y = b.Y
|
||||
pts[1].Fg = b.FgColor
|
||||
pts[1].Bg = b.BgColor
|
||||
pts[1].Ch = TOP_RIGHT
|
||||
|
||||
pts[2].X = b.X
|
||||
pts[2].Y = b.Y + b.Height - 1
|
||||
pts[2].Fg = b.FgColor
|
||||
pts[2].Bg = b.BgColor
|
||||
pts[2].Ch = BOTTOM_LEFT
|
||||
|
||||
pts[3].X = b.X + b.Width - 1
|
||||
pts[3].Y = b.Y + b.Height - 1
|
||||
pts[3].Fg = b.FgColor
|
||||
pts[3].Bg = b.BgColor
|
||||
pts[3].Ch = BOTTOM_RIGHT
|
||||
|
||||
copy(pts[4:], (hline{b.X + 1, b.Y, b.Width - 2, b.FgColor, b.BgColor}).Buffer())
|
||||
copy(pts[4+b.Width-2:], (hline{b.X + 1, b.Y + b.Height - 1, b.Width - 2, b.FgColor, b.BgColor}).Buffer())
|
||||
copy(pts[4+2*b.Width-4:], (vline{b.X, b.Y + 1, b.Height - 2, b.FgColor, b.BgColor}).Buffer())
|
||||
copy(pts[4+2*b.Width-4+b.Height-2:], (vline{b.X + b.Width - 1, b.Y + 1, b.Height - 2, b.FgColor, b.BgColor}).Buffer())
|
||||
|
||||
return pts
|
||||
}
|
||||
|
||||
type labeledBorder struct {
|
||||
border
|
||||
Label string
|
||||
LabelFgColor Attribute
|
||||
LabelBgColor Attribute
|
||||
}
|
||||
|
||||
// Draw a box border with label.
|
||||
func (lb labeledBorder) Buffer() []Point {
|
||||
ps := lb.border.Buffer()
|
||||
maxTxtW := lb.Width - 2
|
||||
rs := trimStr2Runes(lb.Label, maxTxtW)
|
||||
|
||||
for i, j, w := 0, 0, 0; i < len(rs); i++ {
|
||||
w = charWidth(rs[i])
|
||||
ps = append(ps, newPointWithAttrs(rs[i], lb.X+1+j, lb.Y, lb.LabelFgColor, lb.LabelBgColor))
|
||||
j += w
|
||||
}
|
||||
|
||||
return ps
|
||||
}
|
106
Godeps/_workspace/src/github.com/gizak/termui/buffer.go
generated
vendored
Normal file
@ -0,0 +1,106 @@
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
package termui
|
||||
|
||||
import "image"
|
||||
|
||||
// Cell is a rune with assigned Fg and Bg
|
||||
type Cell struct {
|
||||
Ch rune
|
||||
Fg Attribute
|
||||
Bg Attribute
|
||||
}
|
||||
|
||||
// Buffer is a renderable rectangle cell data container.
|
||||
type Buffer struct {
|
||||
Area image.Rectangle // selected drawing area
|
||||
CellMap map[image.Point]Cell
|
||||
}
|
||||
|
||||
// At returns the cell at (x,y).
|
||||
func (b Buffer) At(x, y int) Cell {
|
||||
return b.CellMap[image.Pt(x, y)]
|
||||
}
|
||||
|
||||
// Set assigns a char to (x,y)
|
||||
func (b Buffer) Set(x, y int, c Cell) {
|
||||
b.CellMap[image.Pt(x, y)] = c
|
||||
}
|
||||
|
||||
// Bounds returns the domain for which At can return non-zero color.
|
||||
func (b Buffer) Bounds() image.Rectangle {
|
||||
x0, y0, x1, y1 := 0, 0, 0, 0
|
||||
for p := range b.CellMap {
|
||||
if p.X > x1 {
|
||||
x1 = p.X
|
||||
}
|
||||
if p.X < x0 {
|
||||
x0 = p.X
|
||||
}
|
||||
if p.Y > y1 {
|
||||
y1 = p.Y
|
||||
}
|
||||
if p.Y < y0 {
|
||||
y0 = p.Y
|
||||
}
|
||||
}
|
||||
return image.Rect(x0, y0, x1, y1)
|
||||
}
|
||||
|
||||
// SetArea assigns a new rect area to Buffer b.
|
||||
func (b *Buffer) SetArea(r image.Rectangle) {
|
||||
b.Area.Max = r.Max
|
||||
b.Area.Min = r.Min
|
||||
}
|
||||
|
||||
// Sync sets drawing area to the buffer's bound
|
||||
func (b Buffer) Sync() {
|
||||
b.SetArea(b.Bounds())
|
||||
}
|
||||
|
||||
// NewCell returns a new cell
|
||||
func NewCell(ch rune, fg, bg Attribute) Cell {
|
||||
return Cell{ch, fg, bg}
|
||||
}
|
||||
|
||||
// Merge merges bs Buffers onto b
|
||||
func (b *Buffer) Merge(bs ...Buffer) {
|
||||
for _, buf := range bs {
|
||||
for p, v := range buf.CellMap {
|
||||
b.Set(p.X, p.Y, v)
|
||||
}
|
||||
b.SetArea(b.Area.Union(buf.Area))
|
||||
}
|
||||
}
|
||||
|
||||
// NewBuffer returns a new Buffer
|
||||
func NewBuffer() Buffer {
|
||||
return Buffer{
|
||||
CellMap: make(map[image.Point]Cell),
|
||||
Area: image.Rectangle{}}
|
||||
}
|
||||
|
||||
// Fill fills the Buffer b with ch,fg and bg.
|
||||
func (b Buffer) Fill(ch rune, fg, bg Attribute) {
|
||||
for x := b.Area.Min.X; x < b.Area.Max.X; x++ {
|
||||
for y := b.Area.Min.Y; y < b.Area.Max.Y; y++ {
|
||||
b.Set(x, y, Cell{ch, fg, bg})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NewFilledBuffer returns a new Buffer filled with ch, fb and bg.
|
||||
func NewFilledBuffer(x0, y0, x1, y1 int, ch rune, fg, bg Attribute) Buffer {
|
||||
buf := NewBuffer()
|
||||
buf.Area.Min = image.Pt(x0, y0)
|
||||
buf.Area.Max = image.Pt(x1, y1)
|
||||
|
||||
for x := buf.Area.Min.X; x < buf.Area.Max.X; x++ {
|
||||
for y := buf.Area.Min.Y; y < buf.Area.Max.Y; y++ {
|
||||
buf.Set(x, y, Cell{ch, fg, bg})
|
||||
}
|
||||
}
|
||||
return buf
|
||||
}
|
12
Godeps/_workspace/src/github.com/gizak/termui/canvas.go
generated
vendored
@ -1,4 +1,4 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
@ -63,12 +63,10 @@ func (c Canvas) Unset(x, y int) {
|
||||
}
|
||||
|
||||
// Buffer returns un-styled points
|
||||
func (c Canvas) Buffer() []Point {
|
||||
ps := make([]Point, len(c))
|
||||
i := 0
|
||||
func (c Canvas) Buffer() Buffer {
|
||||
buf := NewBuffer()
|
||||
for k, v := range c {
|
||||
ps[i] = newPoint(v+brailleBase, k[0], k[1])
|
||||
i++
|
||||
buf.Set(k[0], k[1], Cell{Ch: v + brailleBase})
|
||||
}
|
||||
return ps
|
||||
return buf
|
||||
}
|
||||
|
55
Godeps/_workspace/src/github.com/gizak/termui/canvas_test.go
generated
vendored
@ -1,55 +0,0 @@
|
||||
package termui
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
func TestCanvasSet(t *testing.T) {
|
||||
c := NewCanvas()
|
||||
c.Set(0, 0)
|
||||
c.Set(0, 1)
|
||||
c.Set(0, 2)
|
||||
c.Set(0, 3)
|
||||
c.Set(1, 3)
|
||||
c.Set(2, 3)
|
||||
c.Set(3, 3)
|
||||
c.Set(4, 3)
|
||||
c.Set(5, 3)
|
||||
spew.Dump(c)
|
||||
}
|
||||
|
||||
func TestCanvasUnset(t *testing.T) {
|
||||
c := NewCanvas()
|
||||
c.Set(0, 0)
|
||||
c.Set(0, 1)
|
||||
c.Set(0, 2)
|
||||
c.Unset(0, 2)
|
||||
spew.Dump(c)
|
||||
c.Unset(0, 3)
|
||||
spew.Dump(c)
|
||||
}
|
||||
|
||||
func TestCanvasBuffer(t *testing.T) {
|
||||
c := NewCanvas()
|
||||
c.Set(0, 0)
|
||||
c.Set(0, 1)
|
||||
c.Set(0, 2)
|
||||
c.Set(0, 3)
|
||||
c.Set(1, 3)
|
||||
c.Set(2, 3)
|
||||
c.Set(3, 3)
|
||||
c.Set(4, 3)
|
||||
c.Set(5, 3)
|
||||
c.Set(6, 3)
|
||||
c.Set(7, 2)
|
||||
c.Set(8, 1)
|
||||
c.Set(9, 0)
|
||||
bufs := c.Buffer()
|
||||
rs := make([]rune, len(bufs))
|
||||
for i, v := range bufs {
|
||||
rs[i] = v.Ch
|
||||
}
|
||||
spew.Dump(string(rs))
|
||||
}
|
26
Godeps/_workspace/src/github.com/gizak/termui/config
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env perl6
|
||||
|
||||
use v6;
|
||||
|
||||
my $copyright = '// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
';
|
||||
|
||||
sub MAIN('update-docstr', Str $srcp) {
|
||||
if $srcp.IO.f {
|
||||
$_ = $srcp.IO.slurp;
|
||||
if m/^ \/\/\s Copyright .+? \n\n/ {
|
||||
unless ~$/ eq $copyright {
|
||||
s/^ \/\/\s Copyright .+? \n\n /$copyright/;
|
||||
spurt $srcp, $_;
|
||||
say "[updated] doc string for:"~$srcp;
|
||||
}
|
||||
} else {
|
||||
say "[added] doc string for "~$srcp~" (no match found)";
|
||||
$_ = $copyright ~ $_;
|
||||
spurt $srcp, $_;
|
||||
}
|
||||
}
|
||||
}
|
117
Godeps/_workspace/src/github.com/gizak/termui/debug/debuger.go
generated
vendored
Normal file
@ -0,0 +1,117 @@
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
package debug
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"golang.org/x/net/websocket"
|
||||
)
|
||||
|
||||
type Server struct {
|
||||
Port string
|
||||
Addr string
|
||||
Path string
|
||||
Msg chan string
|
||||
chs []chan string
|
||||
}
|
||||
|
||||
type Client struct {
|
||||
Port string
|
||||
Addr string
|
||||
Path string
|
||||
ws *websocket.Conn
|
||||
}
|
||||
|
||||
var defaultPort = ":8080"
|
||||
|
||||
func NewServer() *Server {
|
||||
return &Server{
|
||||
Port: defaultPort,
|
||||
Addr: "localhost",
|
||||
Path: "/echo",
|
||||
Msg: make(chan string),
|
||||
chs: make([]chan string, 0),
|
||||
}
|
||||
}
|
||||
|
||||
func NewClient() Client {
|
||||
return Client{
|
||||
Port: defaultPort,
|
||||
Addr: "localhost",
|
||||
Path: "/echo",
|
||||
}
|
||||
}
|
||||
|
||||
func (c Client) ConnectAndListen() error {
|
||||
ws, err := websocket.Dial("ws://"+c.Addr+c.Port+c.Path, "", "http://"+c.Addr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer ws.Close()
|
||||
|
||||
var m string
|
||||
for {
|
||||
err := websocket.Message.Receive(ws, &m)
|
||||
if err != nil {
|
||||
fmt.Print(err)
|
||||
return err
|
||||
}
|
||||
fmt.Print(m)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Server) ListenAndServe() error {
|
||||
http.Handle(s.Path, websocket.Handler(func(ws *websocket.Conn) {
|
||||
defer ws.Close()
|
||||
|
||||
mc := make(chan string)
|
||||
s.chs = append(s.chs, mc)
|
||||
|
||||
for m := range mc {
|
||||
websocket.Message.Send(ws, m)
|
||||
}
|
||||
}))
|
||||
|
||||
go func() {
|
||||
for msg := range s.Msg {
|
||||
for _, c := range s.chs {
|
||||
go func(a chan string) {
|
||||
a <- msg
|
||||
}(c)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return http.ListenAndServe(s.Port, nil)
|
||||
}
|
||||
|
||||
func (s *Server) Log(msg string) {
|
||||
go func() { s.Msg <- msg }()
|
||||
}
|
||||
|
||||
func (s *Server) Logf(format string, a ...interface{}) {
|
||||
s.Log(fmt.Sprintf(format, a...))
|
||||
}
|
||||
|
||||
var DefaultServer = NewServer()
|
||||
var DefaultClient = NewClient()
|
||||
|
||||
func ListenAndServe() error {
|
||||
return DefaultServer.ListenAndServe()
|
||||
}
|
||||
|
||||
func ConnectAndListen() error {
|
||||
return DefaultClient.ConnectAndListen()
|
||||
}
|
||||
|
||||
func Log(msg string) {
|
||||
DefaultServer.Log(msg)
|
||||
}
|
||||
|
||||
func Logf(format string, a ...interface{}) {
|
||||
DefaultServer.Logf(format, a...)
|
||||
}
|
2
Godeps/_workspace/src/github.com/gizak/termui/doc.go
generated
vendored
@ -1,4 +1,4 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
|
493
Godeps/_workspace/src/github.com/gizak/termui/events.go
generated
vendored
@ -1,219 +1,316 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
//
|
||||
// Portions of this file uses [termbox-go](https://github.com/nsf/termbox-go/blob/54b74d087b7c397c402d0e3b66d2ccb6eaf5c2b4/api_common.go)
|
||||
// by [authors](https://github.com/nsf/termbox-go/blob/master/AUTHORS)
|
||||
// under [license](https://github.com/nsf/termbox-go/blob/master/LICENSE)
|
||||
|
||||
package termui
|
||||
|
||||
import "github.com/nsf/termbox-go"
|
||||
import (
|
||||
"path"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
/***********************************termbox-go**************************************/
|
||||
|
||||
type (
|
||||
EventType uint8
|
||||
Modifier uint8
|
||||
Key uint16
|
||||
"github.com/nsf/termbox-go"
|
||||
)
|
||||
|
||||
// This type represents a termbox event. The 'Mod', 'Key' and 'Ch' fields are
|
||||
// valid if 'Type' is EventKey. The 'Width' and 'Height' fields are valid if
|
||||
// 'Type' is EventResize. The 'Err' field is valid if 'Type' is EventError.
|
||||
type Event struct {
|
||||
Type EventType // one of Event* constants
|
||||
Mod Modifier // one of Mod* constants or 0
|
||||
Key Key // one of Key* constants, invalid if 'Ch' is not 0
|
||||
Ch rune // a unicode character
|
||||
Width int // width of the screen
|
||||
Height int // height of the screen
|
||||
Err error // error in case if input failed
|
||||
MouseX int // x coord of mouse
|
||||
MouseY int // y coord of mouse
|
||||
N int // number of bytes written when getting a raw event
|
||||
Type string
|
||||
Path string
|
||||
From string
|
||||
To string
|
||||
Data interface{}
|
||||
Time int64
|
||||
}
|
||||
|
||||
const (
|
||||
KeyF1 Key = 0xFFFF - iota
|
||||
KeyF2
|
||||
KeyF3
|
||||
KeyF4
|
||||
KeyF5
|
||||
KeyF6
|
||||
KeyF7
|
||||
KeyF8
|
||||
KeyF9
|
||||
KeyF10
|
||||
KeyF11
|
||||
KeyF12
|
||||
KeyInsert
|
||||
KeyDelete
|
||||
KeyHome
|
||||
KeyEnd
|
||||
KeyPgup
|
||||
KeyPgdn
|
||||
KeyArrowUp
|
||||
KeyArrowDown
|
||||
KeyArrowLeft
|
||||
KeyArrowRight
|
||||
key_min // see terminfo
|
||||
MouseLeft
|
||||
MouseMiddle
|
||||
MouseRight
|
||||
)
|
||||
var sysEvtChs []chan Event
|
||||
|
||||
const (
|
||||
KeyCtrlTilde Key = 0x00
|
||||
KeyCtrl2 Key = 0x00
|
||||
KeyCtrlSpace Key = 0x00
|
||||
KeyCtrlA Key = 0x01
|
||||
KeyCtrlB Key = 0x02
|
||||
KeyCtrlC Key = 0x03
|
||||
KeyCtrlD Key = 0x04
|
||||
KeyCtrlE Key = 0x05
|
||||
KeyCtrlF Key = 0x06
|
||||
KeyCtrlG Key = 0x07
|
||||
KeyBackspace Key = 0x08
|
||||
KeyCtrlH Key = 0x08
|
||||
KeyTab Key = 0x09
|
||||
KeyCtrlI Key = 0x09
|
||||
KeyCtrlJ Key = 0x0A
|
||||
KeyCtrlK Key = 0x0B
|
||||
KeyCtrlL Key = 0x0C
|
||||
KeyEnter Key = 0x0D
|
||||
KeyCtrlM Key = 0x0D
|
||||
KeyCtrlN Key = 0x0E
|
||||
KeyCtrlO Key = 0x0F
|
||||
KeyCtrlP Key = 0x10
|
||||
KeyCtrlQ Key = 0x11
|
||||
KeyCtrlR Key = 0x12
|
||||
KeyCtrlS Key = 0x13
|
||||
KeyCtrlT Key = 0x14
|
||||
KeyCtrlU Key = 0x15
|
||||
KeyCtrlV Key = 0x16
|
||||
KeyCtrlW Key = 0x17
|
||||
KeyCtrlX Key = 0x18
|
||||
KeyCtrlY Key = 0x19
|
||||
KeyCtrlZ Key = 0x1A
|
||||
KeyEsc Key = 0x1B
|
||||
KeyCtrlLsqBracket Key = 0x1B
|
||||
KeyCtrl3 Key = 0x1B
|
||||
KeyCtrl4 Key = 0x1C
|
||||
KeyCtrlBackslash Key = 0x1C
|
||||
KeyCtrl5 Key = 0x1D
|
||||
KeyCtrlRsqBracket Key = 0x1D
|
||||
KeyCtrl6 Key = 0x1E
|
||||
KeyCtrl7 Key = 0x1F
|
||||
KeyCtrlSlash Key = 0x1F
|
||||
KeyCtrlUnderscore Key = 0x1F
|
||||
KeySpace Key = 0x20
|
||||
KeyBackspace2 Key = 0x7F
|
||||
KeyCtrl8 Key = 0x7F
|
||||
)
|
||||
|
||||
// Alt modifier constant, see Event.Mod field and SetInputMode function.
|
||||
const (
|
||||
ModAlt Modifier = 0x01
|
||||
)
|
||||
|
||||
// Event type. See Event.Type field.
|
||||
const (
|
||||
EventKey EventType = iota
|
||||
EventResize
|
||||
EventMouse
|
||||
EventError
|
||||
EventInterrupt
|
||||
EventRaw
|
||||
EventNone
|
||||
)
|
||||
|
||||
/**************************************end**************************************/
|
||||
|
||||
// convert termbox.Event to termui.Event
|
||||
func uiEvt(e termbox.Event) Event {
|
||||
event := Event{}
|
||||
event.Type = EventType(e.Type)
|
||||
event.Mod = Modifier(e.Mod)
|
||||
event.Key = Key(e.Key)
|
||||
event.Ch = e.Ch
|
||||
event.Width = e.Width
|
||||
event.Height = e.Height
|
||||
event.Err = e.Err
|
||||
event.MouseX = e.MouseX
|
||||
event.MouseY = e.MouseY
|
||||
event.N = e.N
|
||||
|
||||
return event
|
||||
type EvtKbd struct {
|
||||
KeyStr string
|
||||
}
|
||||
|
||||
var evtChs = make([]chan Event, 0)
|
||||
func evtKbd(e termbox.Event) EvtKbd {
|
||||
ek := EvtKbd{}
|
||||
|
||||
// EventCh returns an output-only event channel.
|
||||
// This function can be called many times (multiplexer).
|
||||
func EventCh() <-chan Event {
|
||||
out := make(chan Event)
|
||||
evtChs = append(evtChs, out)
|
||||
return out
|
||||
}
|
||||
k := string(e.Ch)
|
||||
pre := ""
|
||||
mod := ""
|
||||
|
||||
// turn on event listener
|
||||
func evtListen() {
|
||||
go func() {
|
||||
for {
|
||||
e := termbox.PollEvent()
|
||||
// dispatch
|
||||
for _, c := range evtChs {
|
||||
go func(ch chan Event) {
|
||||
ch <- uiEvt(e)
|
||||
}(c)
|
||||
if e.Mod == termbox.ModAlt {
|
||||
mod = "M-"
|
||||
}
|
||||
if e.Ch == 0 {
|
||||
if e.Key > 0xFFFF-12 {
|
||||
k = "<f" + strconv.Itoa(0xFFFF-int(e.Key)+1) + ">"
|
||||
} else if e.Key > 0xFFFF-25 {
|
||||
ks := []string{"<insert>", "<delete>", "<home>", "<end>", "<previous>", "<next>", "<up>", "<down>", "<left>", "<right>"}
|
||||
k = ks[0xFFFF-int(e.Key)-12]
|
||||
}
|
||||
|
||||
if e.Key <= 0x7F {
|
||||
pre = "C-"
|
||||
k = string('a' - 1 + int(e.Key))
|
||||
kmap := map[termbox.Key][2]string{
|
||||
termbox.KeyCtrlSpace: {"C-", "<space>"},
|
||||
termbox.KeyBackspace: {"", "<backspace>"},
|
||||
termbox.KeyTab: {"", "<tab>"},
|
||||
termbox.KeyEnter: {"", "<enter>"},
|
||||
termbox.KeyEsc: {"", "<escape>"},
|
||||
termbox.KeyCtrlBackslash: {"C-", "\\"},
|
||||
termbox.KeyCtrlSlash: {"C-", "/"},
|
||||
termbox.KeySpace: {"", "<space>"},
|
||||
termbox.KeyCtrl8: {"C-", "8"},
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
/*
|
||||
// EventHandlers is a handler sequence
|
||||
var EventHandlers []func(Event)
|
||||
|
||||
var signalQuit = make(chan bool)
|
||||
|
||||
// Quit sends quit signal to terminate termui
|
||||
func Quit() {
|
||||
signalQuit <- true
|
||||
}
|
||||
|
||||
// Wait listening to signalQuit, block operation.
|
||||
func Wait() {
|
||||
<-signalQuit
|
||||
}
|
||||
|
||||
// RegEvtHandler register function into TSEventHandler sequence.
|
||||
func RegEvtHandler(fn func(Event)) {
|
||||
EventHandlers = append(EventHandlers, fn)
|
||||
}
|
||||
|
||||
// EventLoop handles all events and
|
||||
// redirects every event to callbacks in EventHandlers
|
||||
func EventLoop() {
|
||||
evt := make(chan termbox.Event)
|
||||
|
||||
go func() {
|
||||
for {
|
||||
evt <- termbox.PollEvent()
|
||||
}
|
||||
}()
|
||||
|
||||
for {
|
||||
select {
|
||||
case c := <-signalQuit:
|
||||
defer func() { signalQuit <- c }()
|
||||
return
|
||||
case e := <-evt:
|
||||
for _, fn := range EventHandlers {
|
||||
fn(uiEvt(e))
|
||||
if sk, ok := kmap[e.Key]; ok {
|
||||
pre = sk[0]
|
||||
k = sk[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ek.KeyStr = pre + mod + k
|
||||
return ek
|
||||
}
|
||||
|
||||
func crtTermboxEvt(e termbox.Event) Event {
|
||||
systypemap := map[termbox.EventType]string{
|
||||
termbox.EventKey: "keyboard",
|
||||
termbox.EventResize: "window",
|
||||
termbox.EventMouse: "mouse",
|
||||
termbox.EventError: "error",
|
||||
termbox.EventInterrupt: "interrupt",
|
||||
}
|
||||
ne := Event{From: "/sys", Time: time.Now().Unix()}
|
||||
typ := e.Type
|
||||
ne.Type = systypemap[typ]
|
||||
|
||||
switch typ {
|
||||
case termbox.EventKey:
|
||||
kbd := evtKbd(e)
|
||||
ne.Path = "/sys/kbd/" + kbd.KeyStr
|
||||
ne.Data = kbd
|
||||
case termbox.EventResize:
|
||||
wnd := EvtWnd{}
|
||||
wnd.Width = e.Width
|
||||
wnd.Height = e.Height
|
||||
ne.Path = "/sys/wnd/resize"
|
||||
ne.Data = wnd
|
||||
case termbox.EventError:
|
||||
err := EvtErr(e.Err)
|
||||
ne.Path = "/sys/err"
|
||||
ne.Data = err
|
||||
case termbox.EventMouse:
|
||||
m := EvtMouse{}
|
||||
m.X = e.MouseX
|
||||
m.Y = e.MouseY
|
||||
ne.Path = "/sys/mouse"
|
||||
ne.Data = m
|
||||
}
|
||||
return ne
|
||||
}
|
||||
|
||||
type EvtWnd struct {
|
||||
Width int
|
||||
Height int
|
||||
}
|
||||
|
||||
type EvtMouse struct {
|
||||
X int
|
||||
Y int
|
||||
Press string
|
||||
}
|
||||
|
||||
type EvtErr error
|
||||
|
||||
func hookTermboxEvt() {
|
||||
for {
|
||||
e := termbox.PollEvent()
|
||||
|
||||
for _, c := range sysEvtChs {
|
||||
go func(ch chan Event) {
|
||||
ch <- crtTermboxEvt(e)
|
||||
}(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func NewSysEvtCh() chan Event {
|
||||
ec := make(chan Event)
|
||||
sysEvtChs = append(sysEvtChs, ec)
|
||||
return ec
|
||||
}
|
||||
|
||||
var DefaultEvtStream = NewEvtStream()
|
||||
|
||||
type EvtStream struct {
|
||||
sync.RWMutex
|
||||
srcMap map[string]chan Event
|
||||
stream chan Event
|
||||
wg sync.WaitGroup
|
||||
sigStopLoop chan Event
|
||||
Handlers map[string]func(Event)
|
||||
hook func(Event)
|
||||
}
|
||||
|
||||
func NewEvtStream() *EvtStream {
|
||||
return &EvtStream{
|
||||
srcMap: make(map[string]chan Event),
|
||||
stream: make(chan Event),
|
||||
Handlers: make(map[string]func(Event)),
|
||||
sigStopLoop: make(chan Event),
|
||||
}
|
||||
}
|
||||
|
||||
func (es *EvtStream) Init() {
|
||||
es.Merge("internal", es.sigStopLoop)
|
||||
go func() {
|
||||
es.wg.Wait()
|
||||
close(es.stream)
|
||||
}()
|
||||
}
|
||||
|
||||
func cleanPath(p string) string {
|
||||
if p == "" {
|
||||
return "/"
|
||||
}
|
||||
if p[0] != '/' {
|
||||
p = "/" + p
|
||||
}
|
||||
return path.Clean(p)
|
||||
}
|
||||
|
||||
func isPathMatch(pattern, path string) bool {
|
||||
if len(pattern) == 0 {
|
||||
return false
|
||||
}
|
||||
n := len(pattern)
|
||||
return len(path) >= n && path[0:n] == pattern
|
||||
}
|
||||
|
||||
func (es *EvtStream) Merge(name string, ec chan Event) {
|
||||
es.Lock()
|
||||
defer es.Unlock()
|
||||
|
||||
es.wg.Add(1)
|
||||
es.srcMap[name] = ec
|
||||
|
||||
go func(a chan Event) {
|
||||
for n := range a {
|
||||
n.From = name
|
||||
es.stream <- n
|
||||
}
|
||||
es.wg.Done()
|
||||
}(ec)
|
||||
}
|
||||
|
||||
func (es *EvtStream) Handle(path string, handler func(Event)) {
|
||||
es.Handlers[cleanPath(path)] = handler
|
||||
}
|
||||
|
||||
func findMatch(mux map[string]func(Event), path string) string {
|
||||
n := -1
|
||||
pattern := ""
|
||||
for m := range mux {
|
||||
if !isPathMatch(m, path) {
|
||||
continue
|
||||
}
|
||||
if len(m) > n {
|
||||
pattern = m
|
||||
n = len(m)
|
||||
}
|
||||
}
|
||||
return pattern
|
||||
|
||||
}
|
||||
|
||||
func (es *EvtStream) match(path string) string {
|
||||
return findMatch(es.Handlers, path)
|
||||
}
|
||||
|
||||
func (es *EvtStream) Hook(f func(Event)) {
|
||||
es.hook = f
|
||||
}
|
||||
|
||||
func (es *EvtStream) Loop() {
|
||||
for e := range es.stream {
|
||||
switch e.Path {
|
||||
case "/sig/stoploop":
|
||||
return
|
||||
}
|
||||
go func(a Event) {
|
||||
es.RLock()
|
||||
defer es.RUnlock()
|
||||
if pattern := es.match(a.Path); pattern != "" {
|
||||
es.Handlers[pattern](a)
|
||||
}
|
||||
}(e)
|
||||
if es.hook != nil {
|
||||
es.hook(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (es *EvtStream) StopLoop() {
|
||||
go func() {
|
||||
e := Event{
|
||||
Path: "/sig/stoploop",
|
||||
}
|
||||
es.sigStopLoop <- e
|
||||
}()
|
||||
}
|
||||
|
||||
func Merge(name string, ec chan Event) {
|
||||
DefaultEvtStream.Merge(name, ec)
|
||||
}
|
||||
|
||||
func Handle(path string, handler func(Event)) {
|
||||
DefaultEvtStream.Handle(path, handler)
|
||||
}
|
||||
|
||||
func Loop() {
|
||||
DefaultEvtStream.Loop()
|
||||
}
|
||||
|
||||
func StopLoop() {
|
||||
DefaultEvtStream.StopLoop()
|
||||
}
|
||||
|
||||
type EvtTimer struct {
|
||||
Duration time.Duration
|
||||
Count uint64
|
||||
}
|
||||
|
||||
func NewTimerCh(du time.Duration) chan Event {
|
||||
t := make(chan Event)
|
||||
|
||||
go func(a chan Event) {
|
||||
n := uint64(0)
|
||||
for {
|
||||
n++
|
||||
time.Sleep(du)
|
||||
e := Event{}
|
||||
e.Type = "timer"
|
||||
e.Path = "/timer/" + du.String()
|
||||
e.Time = time.Now().Unix()
|
||||
e.Data = EvtTimer{
|
||||
Duration: du,
|
||||
Count: n,
|
||||
}
|
||||
t <- e
|
||||
|
||||
}
|
||||
}(t)
|
||||
return t
|
||||
}
|
||||
|
||||
var DefualtHandler = func(e Event) {
|
||||
}
|
||||
|
||||
var usrEvtCh = make(chan Event)
|
||||
|
||||
func SendCustomEvt(path string, data interface{}) {
|
||||
e := Event{}
|
||||
e.Path = path
|
||||
e.Data = data
|
||||
e.Time = time.Now().Unix()
|
||||
usrEvtCh <- e
|
||||
}
|
||||
*/
|
||||
|
28
Godeps/_workspace/src/github.com/gizak/termui/events_test.go
generated
vendored
@ -1,28 +0,0 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
//
|
||||
// Portions of this file uses [termbox-go](https://github.com/nsf/termbox-go/blob/54b74d087b7c397c402d0e3b66d2ccb6eaf5c2b4/api_common.go)
|
||||
// by [authors](https://github.com/nsf/termbox-go/blob/master/AUTHORS)
|
||||
// under [license](https://github.com/nsf/termbox-go/blob/master/LICENSE)
|
||||
|
||||
package termui
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
termbox "github.com/nsf/termbox-go"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
type boxEvent termbox.Event
|
||||
|
||||
func TestUiEvt(t *testing.T) {
|
||||
err := errors.New("This is a mock error")
|
||||
event := boxEvent{3, 5, 2, 'H', 200, 500, err, 50, 30, 2}
|
||||
expetced := Event{3, 5, 2, 'H', 200, 500, err, 50, 30, 2}
|
||||
|
||||
// We need to do that ugly casting so that vet does not complain
|
||||
assert.Equal(t, uiEvt(termbox.Event(event)), expetced)
|
||||
}
|
35
Godeps/_workspace/src/github.com/gizak/termui/example/barchart.go
generated
vendored
@ -1,35 +0,0 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
package main
|
||||
|
||||
import "github.com/gizak/termui"
|
||||
|
||||
func main() {
|
||||
err := termui.Init()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer termui.Close()
|
||||
|
||||
termui.UseTheme("helloworld")
|
||||
|
||||
bc := termui.NewBarChart()
|
||||
data := []int{3, 2, 5, 3, 9, 5, 3, 2, 5, 8, 3, 2, 4, 5, 3, 2, 5, 7, 5, 3, 2, 6, 7, 4, 6, 3, 6, 7, 8, 3, 6, 4, 5, 3, 2, 4, 6, 4, 8, 5, 9, 4, 3, 6, 5, 3, 6}
|
||||
bclabels := []string{"S0", "S1", "S2", "S3", "S4", "S5"}
|
||||
bc.Border.Label = "Bar Chart"
|
||||
bc.Data = data
|
||||
bc.Width = 26
|
||||
bc.Height = 10
|
||||
bc.DataLabels = bclabels
|
||||
bc.TextColor = termui.ColorGreen
|
||||
bc.BarColor = termui.ColorRed
|
||||
bc.NumColor = termui.ColorYellow
|
||||
|
||||
termui.Render(bc)
|
||||
|
||||
<-termui.EventCh()
|
||||
}
|
BIN
Godeps/_workspace/src/github.com/gizak/termui/example/barchart.png
generated
vendored
Before Width: | Height: | Size: 15 KiB |
BIN
Godeps/_workspace/src/github.com/gizak/termui/example/dashboard.gif
generated
vendored
Before Width: | Height: | Size: 443 KiB |
148
Godeps/_workspace/src/github.com/gizak/termui/example/dashboard.go
generated
vendored
@ -1,148 +0,0 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
package main
|
||||
|
||||
import ui "github.com/gizak/termui"
|
||||
import "math"
|
||||
|
||||
import "time"
|
||||
|
||||
func main() {
|
||||
err := ui.Init()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer ui.Close()
|
||||
|
||||
p := ui.NewPar(":PRESS q TO QUIT DEMO")
|
||||
p.Height = 3
|
||||
p.Width = 50
|
||||
p.TextFgColor = ui.ColorWhite
|
||||
p.Border.Label = "Text Box"
|
||||
p.Border.FgColor = ui.ColorCyan
|
||||
|
||||
strs := []string{"[0] gizak/termui", "[1] editbox.go", "[2] iterrupt.go", "[3] keyboard.go", "[4] output.go", "[5] random_out.go", "[6] dashboard.go", "[7] nsf/termbox-go"}
|
||||
list := ui.NewList()
|
||||
list.Items = strs
|
||||
list.ItemFgColor = ui.ColorYellow
|
||||
list.Border.Label = "List"
|
||||
list.Height = 7
|
||||
list.Width = 25
|
||||
list.Y = 4
|
||||
|
||||
g := ui.NewGauge()
|
||||
g.Percent = 50
|
||||
g.Width = 50
|
||||
g.Height = 3
|
||||
g.Y = 11
|
||||
g.Border.Label = "Gauge"
|
||||
g.BarColor = ui.ColorRed
|
||||
g.Border.FgColor = ui.ColorWhite
|
||||
g.Border.LabelFgColor = ui.ColorCyan
|
||||
|
||||
spark := ui.Sparkline{}
|
||||
spark.Height = 1
|
||||
spark.Title = "srv 0:"
|
||||
spdata := []int{4, 2, 1, 6, 3, 9, 1, 4, 2, 15, 14, 9, 8, 6, 10, 13, 15, 12, 10, 5, 3, 6, 1, 7, 10, 10, 14, 13, 6, 4, 2, 1, 6, 3, 9, 1, 4, 2, 15, 14, 9, 8, 6, 10, 13, 15, 12, 10, 5, 3, 6, 1, 7, 10, 10, 14, 13, 6, 4, 2, 1, 6, 3, 9, 1, 4, 2, 15, 14, 9, 8, 6, 10, 13, 15, 12, 10, 5, 3, 6, 1, 7, 10, 10, 14, 13, 6, 4, 2, 1, 6, 3, 9, 1, 4, 2, 15, 14, 9, 8, 6, 10, 13, 15, 12, 10, 5, 3, 6, 1, 7, 10, 10, 14, 13, 6}
|
||||
spark.Data = spdata
|
||||
spark.LineColor = ui.ColorCyan
|
||||
spark.TitleColor = ui.ColorWhite
|
||||
|
||||
spark1 := ui.Sparkline{}
|
||||
spark1.Height = 1
|
||||
spark1.Title = "srv 1:"
|
||||
spark1.Data = spdata
|
||||
spark1.TitleColor = ui.ColorWhite
|
||||
spark1.LineColor = ui.ColorRed
|
||||
|
||||
sp := ui.NewSparklines(spark, spark1)
|
||||
sp.Width = 25
|
||||
sp.Height = 7
|
||||
sp.Border.Label = "Sparkline"
|
||||
sp.Y = 4
|
||||
sp.X = 25
|
||||
|
||||
sinps := (func() []float64 {
|
||||
n := 220
|
||||
ps := make([]float64, n)
|
||||
for i := range ps {
|
||||
ps[i] = 1 + math.Sin(float64(i)/5)
|
||||
}
|
||||
return ps
|
||||
})()
|
||||
|
||||
lc := ui.NewLineChart()
|
||||
lc.Border.Label = "dot-mode Line Chart"
|
||||
lc.Data = sinps
|
||||
lc.Width = 50
|
||||
lc.Height = 11
|
||||
lc.X = 0
|
||||
lc.Y = 14
|
||||
lc.AxesColor = ui.ColorWhite
|
||||
lc.LineColor = ui.ColorRed | ui.AttrBold
|
||||
lc.Mode = "dot"
|
||||
|
||||
bc := ui.NewBarChart()
|
||||
bcdata := []int{3, 2, 5, 3, 9, 5, 3, 2, 5, 8, 3, 2, 4, 5, 3, 2, 5, 7, 5, 3, 2, 6, 7, 4, 6, 3, 6, 7, 8, 3, 6, 4, 5, 3, 2, 4, 6, 4, 8, 5, 9, 4, 3, 6, 5, 3, 6}
|
||||
bclabels := []string{"S0", "S1", "S2", "S3", "S4", "S5"}
|
||||
bc.Border.Label = "Bar Chart"
|
||||
bc.Width = 26
|
||||
bc.Height = 10
|
||||
bc.X = 51
|
||||
bc.Y = 0
|
||||
bc.DataLabels = bclabels
|
||||
bc.BarColor = ui.ColorGreen
|
||||
bc.NumColor = ui.ColorBlack
|
||||
|
||||
lc1 := ui.NewLineChart()
|
||||
lc1.Border.Label = "braille-mode Line Chart"
|
||||
lc1.Data = sinps
|
||||
lc1.Width = 26
|
||||
lc1.Height = 11
|
||||
lc1.X = 51
|
||||
lc1.Y = 14
|
||||
lc1.AxesColor = ui.ColorWhite
|
||||
lc1.LineColor = ui.ColorYellow | ui.AttrBold
|
||||
|
||||
p1 := ui.NewPar("Hey!\nI am a borderless block!")
|
||||
p1.HasBorder = false
|
||||
p1.Width = 26
|
||||
p1.Height = 2
|
||||
p1.TextFgColor = ui.ColorMagenta
|
||||
p1.X = 52
|
||||
p1.Y = 11
|
||||
|
||||
draw := func(t int) {
|
||||
g.Percent = t % 101
|
||||
list.Items = strs[t%9:]
|
||||
sp.Lines[0].Data = spdata[:30+t%50]
|
||||
sp.Lines[1].Data = spdata[:35+t%50]
|
||||
lc.Data = sinps[t/2:]
|
||||
lc1.Data = sinps[2*t:]
|
||||
bc.Data = bcdata[t/2%10:]
|
||||
ui.Render(p, list, g, sp, lc, bc, lc1, p1)
|
||||
}
|
||||
|
||||
evt := ui.EventCh()
|
||||
|
||||
i := 0
|
||||
for {
|
||||
select {
|
||||
case e := <-evt:
|
||||
if e.Type == ui.EventKey && e.Ch == 'q' {
|
||||
return
|
||||
}
|
||||
default:
|
||||
draw(i)
|
||||
i++
|
||||
if i == 102 {
|
||||
return
|
||||
}
|
||||
time.Sleep(time.Second / 2)
|
||||
}
|
||||
}
|
||||
}
|
62
Godeps/_workspace/src/github.com/gizak/termui/example/gauge.go
generated
vendored
@ -1,62 +0,0 @@
|
||||
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
|
||||
// Use of this source code is governed by a MIT license that can
|
||||
// be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
package main
|
||||
|
||||
import "github.com/gizak/termui"
|
||||
|
||||
func main() {
|
||||
err := termui.Init()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer termui.Close()
|
||||
|
||||
termui.UseTheme("helloworld")
|
||||
|
||||
g0 := termui.NewGauge()
|
||||
g0.Percent = 40
|
||||
g0.Width = 50
|
||||
g0.Height = 3
|
||||
g0.Border.Label = "Slim Gauge"
|
||||
g0.BarColor = termui.ColorRed
|
||||
g0.Border.FgColor = termui.ColorWhite
|
||||
g0.Border.LabelFgColor = termui.ColorCyan
|
||||
|
||||
g2 := termui.NewGauge()
|
||||
g2.Percent = 60
|
||||
g2.Width = 50
|
||||
g2.Height = 3
|
||||
g2.PercentColor = termui.ColorBlue
|
||||
g2.Y = 3
|
||||
g2.Border.Label = "Slim Gauge"
|
||||
g2.BarColor = termui.ColorYellow
|
||||
g2.Border.FgColor = termui.ColorWhite
|
||||
|
||||
g1 := termui.NewGauge()
|
||||
g1.Percent = 30
|
||||
g1.Width = 50
|
||||
g1.Height = 5
|
||||
g1.Y = 6
|
||||
g1.Border.Label = "Big Gauge"
|
||||
g1.PercentColor = termui.ColorYellow
|
||||
g1.BarColor = termui.ColorGreen
|
||||
g1.Border.FgColor = termui.ColorWhite
|
||||
g1.Border.LabelFgColor = termui.ColorMagenta
|
||||
|
||||
g3 := termui.NewGauge()
|
||||
g3.Percent = 50
|
||||
g3.Width = 50
|
||||
g3.Height = 3
|
||||
g3.Y = 11
|
||||
g3.Border.Label = "Gauge with custom label"
|
||||
g3.Label = "{{percent}}% (100MBs free)"
|
||||
g3.LabelAlign = termui.AlignRight
|
||||
|
||||
termui.Render(g0, g1, g2, g3)
|
||||
|
||||
<-termui.EventCh()
|
||||
}
|
BIN
Godeps/_workspace/src/github.com/gizak/termui/example/gauge.png
generated
vendored
Before Width: | Height: | Size: 32 KiB |
BIN
Godeps/_workspace/src/github.com/gizak/termui/example/grid.gif
generated
vendored
Before Width: | Height: | Size: 782 KiB |
134 Godeps/_workspace/src/github.com/gizak/termui/example/grid.go (generated, vendored)
@@ -1,134 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import ui "github.com/gizak/termui"
import "math"
import "time"

func main() {
    err := ui.Init()
    if err != nil {
        panic(err)
    }
    defer ui.Close()

    sinps := (func() []float64 {
        n := 400
        ps := make([]float64, n)
        for i := range ps {
            ps[i] = 1 + math.Sin(float64(i)/5)
        }
        return ps
    })()
    sinpsint := (func() []int {
        ps := make([]int, len(sinps))
        for i, v := range sinps {
            ps[i] = int(100*v + 10)
        }
        return ps
    })()

    ui.UseTheme("helloworld")

    spark := ui.Sparkline{}
    spark.Height = 8
    spdata := sinpsint
    spark.Data = spdata[:100]
    spark.LineColor = ui.ColorCyan
    spark.TitleColor = ui.ColorWhite

    sp := ui.NewSparklines(spark)
    sp.Height = 11
    sp.Border.Label = "Sparkline"

    lc := ui.NewLineChart()
    lc.Border.Label = "braille-mode Line Chart"
    lc.Data = sinps
    lc.Height = 11
    lc.AxesColor = ui.ColorWhite
    lc.LineColor = ui.ColorYellow | ui.AttrBold

    gs := make([]*ui.Gauge, 3)
    for i := range gs {
        gs[i] = ui.NewGauge()
        gs[i].Height = 2
        gs[i].HasBorder = false
        gs[i].Percent = i * 10
        gs[i].PaddingBottom = 1
        gs[i].BarColor = ui.ColorRed
    }

    ls := ui.NewList()
    ls.HasBorder = false
    ls.Items = []string{
        "[1] Downloading File 1",
        "", // == \newline
        "[2] Downloading File 2",
        "",
        "[3] Uploading File 3",
    }
    ls.Height = 5

    par := ui.NewPar("<> This row has 3 columns\n<- Widgets can be stacked up like left side\n<- Stacked widgets are treated as a single widget")
    par.Height = 5
    par.Border.Label = "Demonstration"

    // build layout
    ui.Body.AddRows(
        ui.NewRow(
            ui.NewCol(6, 0, sp),
            ui.NewCol(6, 0, lc)),
        ui.NewRow(
            ui.NewCol(3, 0, ls),
            ui.NewCol(3, 0, gs[0], gs[1], gs[2]),
            ui.NewCol(6, 0, par)))

    // calculate layout
    ui.Body.Align()

    done := make(chan bool)
    redraw := make(chan bool)

    update := func() {
        for i := 0; i < 103; i++ {
            for _, g := range gs {
                g.Percent = (g.Percent + 3) % 100
            }

            sp.Lines[0].Data = spdata[:100+i]
            lc.Data = sinps[2*i:]

            time.Sleep(time.Second / 2)
            redraw <- true
        }
        done <- true
    }

    evt := ui.EventCh()

    ui.Render(ui.Body)
    go update()

    for {
        select {
        case e := <-evt:
            if e.Type == ui.EventKey && e.Ch == 'q' {
                return
            }
            if e.Type == ui.EventResize {
                ui.Body.Width = ui.TermWidth()
                ui.Body.Align()
                go func() { redraw <- true }()
            }
        case <-done:
            return
        case <-redraw:
            ui.Render(ui.Body)
        }
    }
}
68 Godeps/_workspace/src/github.com/gizak/termui/example/linechart.go (generated, vendored)
@@ -1,68 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import (
    "math"

    "github.com/gizak/termui"
)

func main() {
    err := termui.Init()
    if err != nil {
        panic(err)
    }
    defer termui.Close()

    termui.UseTheme("helloworld")

    sinps := (func() []float64 {
        n := 220
        ps := make([]float64, n)
        for i := range ps {
            ps[i] = 1 + math.Sin(float64(i)/5)
        }
        return ps
    })()

    lc0 := termui.NewLineChart()
    lc0.Border.Label = "braille-mode Line Chart"
    lc0.Data = sinps
    lc0.Width = 50
    lc0.Height = 12
    lc0.X = 0
    lc0.Y = 0
    lc0.AxesColor = termui.ColorWhite
    lc0.LineColor = termui.ColorGreen | termui.AttrBold

    lc1 := termui.NewLineChart()
    lc1.Border.Label = "dot-mode Line Chart"
    lc1.Mode = "dot"
    lc1.Data = sinps
    lc1.Width = 26
    lc1.Height = 12
    lc1.X = 51
    lc1.DotStyle = '+'
    lc1.AxesColor = termui.ColorWhite
    lc1.LineColor = termui.ColorYellow | termui.AttrBold

    lc2 := termui.NewLineChart()
    lc2.Border.Label = "dot-mode Line Chart"
    lc2.Mode = "dot"
    lc2.Data = sinps[4:]
    lc2.Width = 77
    lc2.Height = 16
    lc2.X = 0
    lc2.Y = 12
    lc2.AxesColor = termui.ColorWhite
    lc2.LineColor = termui.ColorCyan | termui.AttrBold

    termui.Render(lc0, lc1, lc2)

    <-termui.EventCh()
}
BIN Godeps/_workspace/src/github.com/gizak/termui/example/linechart.png (generated, vendored): binary image, before size 136 KiB
41 Godeps/_workspace/src/github.com/gizak/termui/example/list.go (generated, vendored)
@@ -1,41 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import "github.com/gizak/termui"

func main() {
    err := termui.Init()
    if err != nil {
        panic(err)
    }
    defer termui.Close()

    termui.UseTheme("helloworld")

    strs := []string{
        "[0] github.com/gizak/termui",
        "[1] 你好,世界",
        "[2] こんにちは世界",
        "[3] keyboard.go",
        "[4] output.go",
        "[5] random_out.go",
        "[6] dashboard.go",
        "[7] nsf/termbox-go"}

    ls := termui.NewList()
    ls.Items = strs
    ls.ItemFgColor = termui.ColorYellow
    ls.Border.Label = "List"
    ls.Height = 7
    ls.Width = 25
    ls.Y = 0

    termui.Render(ls)

    <-termui.EventCh()
}
BIN Godeps/_workspace/src/github.com/gizak/termui/example/list.png (generated, vendored): binary image, before size 36 KiB
50 Godeps/_workspace/src/github.com/gizak/termui/example/mbarchart.go (generated, vendored)
@@ -1,50 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import "github.com/gizak/termui"

func main() {
    err := termui.Init()
    if err != nil {
        panic(err)
    }
    defer termui.Close()

    termui.UseTheme("helloworld")

    bc := termui.NewMBarChart()
    math := []int{90, 85, 90, 80}
    english := []int{70, 85, 75, 60}
    science := []int{75, 60, 80, 85}
    compsci := []int{100, 100, 100, 100}
    bc.Data[0] = math
    bc.Data[1] = english
    bc.Data[2] = science
    bc.Data[3] = compsci
    studentsName := []string{"Ken", "Rob", "Dennis", "Linus"}
    bc.Border.Label = "Student's Marks X-Axis=Name Y-Axis=Marks[Math,English,Science,ComputerScience] in %"
    bc.Width = 100
    bc.Height = 50
    bc.Y = 10
    bc.BarWidth = 10
    bc.DataLabels = studentsName
    bc.ShowScale = true //Show y_axis scale value (min and max)
    bc.SetMax(400)

    bc.TextColor = termui.ColorGreen    //this is color for label (x-axis)
    bc.BarColor[3] = termui.ColorGreen  //BarColor for computerscience
    bc.BarColor[1] = termui.ColorYellow //Bar Color for english
    bc.NumColor[3] = termui.ColorRed    // Num color for computerscience
    bc.NumColor[1] = termui.ColorRed    // num color for english

    //Other colors are automatically populated, btw All the students seems do well in computerscience. :p

    termui.Render(bc)

    <-termui.EventCh()
}
BIN Godeps/_workspace/src/github.com/gizak/termui/example/mbarchart.png (generated, vendored): binary image, before size 20 KiB
48 Godeps/_workspace/src/github.com/gizak/termui/example/par.go (generated, vendored)
@@ -1,48 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import "github.com/gizak/termui"

func main() {
    err := termui.Init()
    if err != nil {
        panic(err)
    }
    defer termui.Close()

    termui.UseTheme("helloworld")

    par0 := termui.NewPar("Borderless Text")
    par0.Height = 1
    par0.Width = 20
    par0.Y = 1
    par0.HasBorder = false

    par1 := termui.NewPar("你好,世界。")
    par1.Height = 3
    par1.Width = 17
    par1.X = 20
    par1.Border.Label = "标签"

    par2 := termui.NewPar("Simple text\nwith label. It can be multilined with \\n or break automatically")
    par2.Height = 5
    par2.Width = 37
    par2.Y = 4
    par2.Border.Label = "Multiline"
    par2.Border.FgColor = termui.ColorYellow

    par3 := termui.NewPar("Long text with label and it is auto trimmed.")
    par3.Height = 3
    par3.Width = 37
    par3.Y = 9
    par3.Border.Label = "Auto Trim"

    termui.Render(par0, par1, par2, par3)

    <-termui.EventCh()
}
BIN Godeps/_workspace/src/github.com/gizak/termui/example/par.png (generated, vendored): binary image, before size 64 KiB
65 Godeps/_workspace/src/github.com/gizak/termui/example/sparklines.go (generated, vendored)
@@ -1,65 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import "github.com/gizak/termui"

func main() {
    err := termui.Init()
    if err != nil {
        panic(err)
    }
    defer termui.Close()

    termui.UseTheme("helloworld")

    data := []int{4, 2, 1, 6, 3, 9, 1, 4, 2, 15, 14, 9, 8, 6, 10, 13, 15, 12, 10, 5, 3, 6, 1, 7, 10, 10, 14, 13, 6}
    spl0 := termui.NewSparkline()
    spl0.Data = data[3:]
    spl0.Title = "Sparkline 0"
    spl0.LineColor = termui.ColorGreen

    // single
    spls0 := termui.NewSparklines(spl0)
    spls0.Height = 2
    spls0.Width = 20
    spls0.HasBorder = false

    spl1 := termui.NewSparkline()
    spl1.Data = data
    spl1.Title = "Sparkline 1"
    spl1.LineColor = termui.ColorRed

    spl2 := termui.NewSparkline()
    spl2.Data = data[5:]
    spl2.Title = "Sparkline 2"
    spl2.LineColor = termui.ColorMagenta

    // group
    spls1 := termui.NewSparklines(spl0, spl1, spl2)
    spls1.Height = 8
    spls1.Width = 20
    spls1.Y = 3
    spls1.Border.Label = "Group Sparklines"

    spl3 := termui.NewSparkline()
    spl3.Data = data
    spl3.Title = "Enlarged Sparkline"
    spl3.Height = 8
    spl3.LineColor = termui.ColorYellow

    spls2 := termui.NewSparklines(spl3)
    spls2.Height = 11
    spls2.Width = 30
    spls2.Border.FgColor = termui.ColorCyan
    spls2.X = 21
    spls2.Border.Label = "Tweeked Sparkline"

    termui.Render(spls0, spls1, spls2)

    <-termui.EventCh()
}
BIN Godeps/_workspace/src/github.com/gizak/termui/example/sparklines.png (generated, vendored): binary image, before size 42 KiB
143 Godeps/_workspace/src/github.com/gizak/termui/example/theme.go (generated, vendored)
@@ -1,143 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

// +build ignore

package main

import ui "github.com/gizak/termui"
import "math"

import "time"

func main() {
    err := ui.Init()
    if err != nil {
        panic(err)
    }
    defer ui.Close()

    ui.UseTheme("helloworld")

    p := ui.NewPar(":PRESS q TO QUIT DEMO")
    p.Height = 3
    p.Width = 50
    p.Border.Label = "Text Box"

    strs := []string{"[0] gizak/termui", "[1] editbox.go", "[2] iterrupt.go", "[3] keyboard.go", "[4] output.go", "[5] random_out.go", "[6] dashboard.go", "[7] nsf/termbox-go"}
    list := ui.NewList()
    list.Items = strs
    list.Border.Label = "List"
    list.Height = 7
    list.Width = 25
    list.Y = 4

    g := ui.NewGauge()
    g.Percent = 50
    g.Width = 50
    g.Height = 3
    g.Y = 11
    g.Border.Label = "Gauge"

    spark := ui.NewSparkline()
    spark.Title = "srv 0:"
    spdata := []int{4, 2, 1, 6, 3, 9, 1, 4, 2, 15, 14, 9, 8, 6, 10, 13, 15, 12, 10, 5, 3, 6, 1, 7, 10, 10, 14, 13, 6}
    spark.Data = spdata

    spark1 := ui.NewSparkline()
    spark1.Title = "srv 1:"
    spark1.Data = spdata

    sp := ui.NewSparklines(spark, spark1)
    sp.Width = 25
    sp.Height = 7
    sp.Border.Label = "Sparkline"
    sp.Y = 4
    sp.X = 25

    lc := ui.NewLineChart()
    sinps := (func() []float64 {
        n := 100
        ps := make([]float64, n)
        for i := range ps {
            ps[i] = 1 + math.Sin(float64(i)/4)
        }
        return ps
    })()

    lc.Border.Label = "Line Chart"
    lc.Data = sinps
    lc.Width = 50
    lc.Height = 11
    lc.X = 0
    lc.Y = 14
    lc.Mode = "dot"

    bc := ui.NewBarChart()
    bcdata := []int{3, 2, 5, 3, 9, 5, 3, 2, 5, 8, 3, 2, 4, 5, 3, 2, 5, 7, 5, 3, 2, 6, 7, 4, 6, 3, 6, 7, 8, 3, 6, 4, 5, 3, 2, 4, 6, 4, 8, 5, 9, 4, 3, 6, 5, 3, 6}
    bclabels := []string{"S0", "S1", "S2", "S3", "S4", "S5"}
    bc.Border.Label = "Bar Chart"
    bc.Width = 26
    bc.Height = 10
    bc.X = 51
    bc.Y = 0
    bc.DataLabels = bclabels

    lc1 := ui.NewLineChart()
    lc1.Border.Label = "Line Chart"
    rndwalk := (func() []float64 {
        n := 150
        d := make([]float64, n)
        for i := 1; i < n; i++ {
            if i < 20 {
                d[i] = d[i-1] + 0.01
            }
            if i > 20 {
                d[i] = d[i-1] - 0.05
            }
        }
        return d
    })()
    lc1.Data = rndwalk
    lc1.Width = 26
    lc1.Height = 11
    lc1.X = 51
    lc1.Y = 14

    p1 := ui.NewPar("Hey!\nI am a borderless block!")
    p1.HasBorder = false
    p1.Width = 26
    p1.Height = 2
    p1.X = 52
    p1.Y = 11

    draw := func(t int) {
        g.Percent = t % 101
        list.Items = strs[t%9:]
        sp.Lines[0].Data = spdata[t%10:]
        sp.Lines[1].Data = spdata[t/2%10:]
        lc.Data = sinps[t/2:]
        lc1.Data = rndwalk[t:]
        bc.Data = bcdata[t/2%10:]
        ui.Render(p, list, g, sp, lc, bc, lc1, p1)
    }

    evt := ui.EventCh()
    i := 0
    for {
        select {
        case e := <-evt:
            if e.Type == ui.EventKey && e.Ch == 'q' {
                return
            }
        default:
            draw(i)
            i++
            if i == 102 {
                return
            }
            time.Sleep(time.Second / 2)
        }
    }
}
BIN Godeps/_workspace/src/github.com/gizak/termui/example/themedefault.png (generated, vendored): binary image, before size 103 KiB
BIN Godeps/_workspace/src/github.com/gizak/termui/example/themehelloworld.png (generated, vendored): binary image, before size 88 KiB
96 Godeps/_workspace/src/github.com/gizak/termui/gauge.go (generated, vendored)
@@ -1,4 +1,4 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

@@ -21,33 +21,27 @@ import (
    g.PercentColor = termui.ColorBlue
*/

// Align is the position of the gauge's label.
type Align int

// All supported positions.
const (
    AlignLeft Align = iota
    AlignCenter
    AlignRight
)
const ColorUndef Attribute = Attribute(^uint16(0))

type Gauge struct {
    Block
    Percent int
    BarColor Attribute
    PercentColor Attribute
    Label string
    LabelAlign Align
    Percent int
    BarColor Attribute
    PercentColor Attribute
    PercentColorHighlighted Attribute
    Label string
    LabelAlign Align
}

// NewGauge return a new gauge with current theme.
func NewGauge() *Gauge {
    g := &Gauge{
        Block: *NewBlock(),
        PercentColor: theme.GaugePercent,
        BarColor: theme.GaugeBar,
        Label: "{{percent}}%",
        LabelAlign: AlignCenter,
        Block: *NewBlock(),
        PercentColor: ThemeAttr("gauge.percent.fg"),
        BarColor: ThemeAttr("gauge.bar.bg"),
        Label: "{{percent}}%",
        LabelAlign: AlignCenter,
        PercentColorHighlighted: ColorUndef,
    }

    g.Width = 12
@@ -56,28 +50,26 @@ func NewGauge() *Gauge {
}

// Buffer implements Bufferer interface.
func (g *Gauge) Buffer() []Point {
    ps := g.Block.Buffer()
func (g *Gauge) Buffer() Buffer {
    buf := g.Block.Buffer()

    // plot bar
    w := g.Percent * g.innerWidth / 100
    for i := 0; i < g.innerHeight; i++ {
    w := g.Percent * g.innerArea.Dx() / 100
    for i := 0; i < g.innerArea.Dy(); i++ {
        for j := 0; j < w; j++ {
            p := Point{}
            p.X = g.innerX + j
            p.Y = g.innerY + i
            p.Ch = ' '
            p.Bg = g.BarColor
            if p.Bg == ColorDefault {
                p.Bg |= AttrReverse
            c := Cell{}
            c.Ch = ' '
            c.Bg = g.BarColor
            if c.Bg == ColorDefault {
                c.Bg |= AttrReverse
            }
            ps = append(ps, p)
            buf.Set(g.innerArea.Min.X+j, g.innerArea.Min.Y+i, c)
        }
    }

    // plot percentage
    s := strings.Replace(g.Label, "{{percent}}", strconv.Itoa(g.Percent), -1)
    pry := g.innerY + g.innerHeight/2
    pry := g.innerArea.Min.Y + g.innerArea.Dy()/2
    rs := str2runes(s)
    var pos int
    switch g.LabelAlign {
@@ -85,29 +77,33 @@ func (g *Gauge) Buffer() []Point {
        pos = 0

    case AlignCenter:
        pos = (g.innerWidth - strWidth(s)) / 2
        pos = (g.innerArea.Dx() - strWidth(s)) / 2

    case AlignRight:
        pos = g.innerWidth - strWidth(s)
        pos = g.innerArea.Dx() - strWidth(s) - 1
    }
    pos += g.innerArea.Min.X

    for i, v := range rs {
        p := Point{}
        p.X = 1 + pos + i
        p.Y = pry
        p.Ch = v
        p.Fg = g.PercentColor
        if w+g.innerX > pos+i {
            p.Bg = g.BarColor
            if p.Bg == ColorDefault {
                p.Bg |= AttrReverse
            }

        } else {
            p.Bg = g.Block.BgColor
        c := Cell{
            Ch: v,
            Fg: g.PercentColor,
        }

        ps = append(ps, p)
        if w+g.innerArea.Min.X > pos+i {
            c.Bg = g.BarColor
            if c.Bg == ColorDefault {
                c.Bg |= AttrReverse
            }

            if g.PercentColorHighlighted != ColorUndef {
                c.Fg = g.PercentColorHighlighted
            }
        } else {
            c.Bg = g.Block.Bg
        }

        buf.Set(1+pos+i, pry, c)
    }
    return g.Block.chopOverflow(ps)
    return buf
}
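The gauge.go diff above moves the widget from the old []Point buffer to the new Buffer/Cell API and adds a PercentColorHighlighted field alongside Label and LabelAlign. A minimal sketch of driving the reworked Gauge follows; it only uses names visible in this diff and in the deleted examples, and the Init/Render/EventCh calls are carried over from the old examples as an assumption, not something this diff confirms.

// Sketch only: exercises the Gauge fields shown in the diff above.
package main

import ui "github.com/gizak/termui"

func main() {
    if err := ui.Init(); err != nil {
        panic(err)
    }
    defer ui.Close()

    g := ui.NewGauge()
    g.Percent = 40
    g.Width = 50
    g.Height = 3
    g.Label = "{{percent}}% done"
    g.LabelAlign = ui.AlignRight
    g.BarColor = ui.ColorRed
    // PercentColorHighlighted defaults to ColorUndef (no highlight); setting it
    // changes the label color over the filled part of the bar.
    g.PercentColorHighlighted = ui.ColorBlack

    ui.Render(g)
    <-ui.EventCh() // event API as in the older examples; assumed unchanged here
}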
20 Godeps/_workspace/src/github.com/gizak/termui/grid.go (generated, vendored)
@@ -1,4 +1,4 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

@@ -160,8 +160,8 @@ func (r *Row) SetWidth(w int) {

// Buffer implements Bufferer interface,
// recursively merge all widgets buffer
func (r *Row) Buffer() []Point {
    merged := []Point{}
func (r *Row) Buffer() Buffer {
    merged := NewBuffer()

    if r.isRenderableLeaf() {
        return r.Widget.Buffer()
@@ -169,13 +169,13 @@ func (r *Row) Buffer() []Point {

    // for those are not leaves but have a renderable widget
    if r.Widget != nil {
        merged = append(merged, r.Widget.Buffer()...)
        merged.Merge(r.Widget.Buffer())
    }

    // collect buffer from children
    if !r.isLeaf() {
        for _, c := range r.Cols {
            merged = append(merged, c.Buffer()...)
            merged.Merge(c.Buffer())
        }
    }

@@ -267,13 +267,13 @@ func (g *Grid) Align() {
}

// Buffer implments Bufferer interface.
func (g Grid) Buffer() []Point {
    ps := []Point{}
func (g Grid) Buffer() Buffer {
    buf := NewBuffer()

    for _, r := range g.Rows {
        ps = append(ps, r.Buffer()...)
        buf.Merge(r.Buffer())
    }
    return ps
    return buf
}

// Body corresponds to the entire terminal display region.
var Body *Grid
98 Godeps/_workspace/src/github.com/gizak/termui/grid_test.go (generated, vendored)
@@ -1,98 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

package termui

import (
    "testing"

    "github.com/davecgh/go-spew/spew"
)

var r *Row

func TestRowWidth(t *testing.T) {
    p0 := NewPar("p0")
    p0.Height = 1
    p1 := NewPar("p1")
    p1.Height = 1
    p2 := NewPar("p2")
    p2.Height = 1
    p3 := NewPar("p3")
    p3.Height = 1

    /* test against tree:

            r
           / \
         0:w  1
             / \
          10:w  11
                /
             110:w
               /
           1100:w
    */
    /*
        r = &row{
            Span: 12,
            Cols: []*row{
                &row{Widget: p0, Span: 6},
                &row{
                    Span: 6,
                    Cols: []*row{
                        &row{Widget: p1, Span: 6},
                        &row{
                            Span: 6,
                            Cols: []*row{
                                &row{
                                    Span: 12,
                                    Widget: p2,
                                    Cols: []*row{
                                        &row{Span: 12, Widget: p3}}}}}}}}}
    */

    r = NewRow(
        NewCol(6, 0, p0),
        NewCol(6, 0,
            NewRow(
                NewCol(6, 0, p1),
                NewCol(6, 0, p2, p3))))

    r.assignWidth(100)
    if r.Width != 100 ||
        (r.Cols[0].Width) != 50 ||
        (r.Cols[1].Width) != 50 ||
        (r.Cols[1].Cols[0].Width) != 25 ||
        (r.Cols[1].Cols[1].Width) != 25 ||
        (r.Cols[1].Cols[1].Cols[0].Width) != 25 ||
        (r.Cols[1].Cols[1].Cols[0].Cols[0].Width) != 25 {
        t.Error("assignWidth fails")
    }
}

func TestRowHeight(t *testing.T) {
    spew.Dump()

    if (r.solveHeight()) != 2 ||
        (r.Cols[1].Cols[1].Height) != 2 ||
        (r.Cols[1].Cols[1].Cols[0].Height) != 2 ||
        (r.Cols[1].Cols[0].Height) != 1 {
        t.Error("solveHeight fails")
    }
}

func TestAssignXY(t *testing.T) {
    r.assignX(0)
    r.assignY(0)
    if (r.Cols[0].X) != 0 ||
        (r.Cols[1].Cols[0].X) != 50 ||
        (r.Cols[1].Cols[1].X) != 75 ||
        (r.Cols[1].Cols[1].Cols[0].X) != 75 ||
        (r.Cols[1].Cols[0].Y) != 0 ||
        (r.Cols[1].Cols[1].Cols[0].Y) != 0 ||
        (r.Cols[1].Cols[1].Cols[0].Cols[0].Y) != 1 {
        t.Error("assignXY fails")
    }
}
156 Godeps/_workspace/src/github.com/gizak/termui/helper.go (generated, vendored)
@@ -1,10 +1,15 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Copyright 2016 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

package termui

import tm "github.com/nsf/termbox-go"
import (
    "regexp"
    "strings"

    tm "github.com/nsf/termbox-go"
)
import rw "github.com/mattn/go-runewidth"

/* ---------------Port from termbox-go --------------------- */
@@ -12,6 +17,7 @@ import rw "github.com/mattn/go-runewidth"
// Attribute is printable cell's color and style.
type Attribute uint16

// 8 basic clolrs
const (
    ColorDefault Attribute = iota
    ColorBlack
@@ -24,7 +30,10 @@ const (
    ColorWhite
)

const NumberofColors = 8 //Have a constant that defines number of colors
//Have a constant that defines number of colors
const NumberofColors = 8

// Text style
const (
    AttrBold Attribute = 1 << (iota + 9)
    AttrUnderline
@@ -46,15 +55,39 @@ func str2runes(s string) []rune {
    return []rune(s)
}

// Here for backwards-compatibility.
func trimStr2Runes(s string, w int) []rune {
    return TrimStr2Runes(s, w)
}

// TrimStr2Runes trims string to w[-1 rune], appends …, and returns the runes
// of that string if string is grather then n. If string is small then w,
// return the runes.
func TrimStr2Runes(s string, w int) []rune {
    if w <= 0 {
        return []rune{}
    }

    sw := rw.StringWidth(s)
    if sw > w {
        return []rune(rw.Truncate(s, w, dot))
    }
    return str2runes(s) //[]rune(rw.Truncate(s, w, ""))
    return str2runes(s)
}

// TrimStrIfAppropriate trim string to "s[:-1] + …"
// if string > width otherwise return string
func TrimStrIfAppropriate(s string, w int) string {
    if w <= 0 {
        return ""
    }

    sw := rw.StringWidth(s)
    if sw > w {
        return rw.Truncate(s, w, dot)
    }

    return s
}

func strWidth(s string) int {
@@ -64,3 +97,118 @@ func strWidth(s string) int {
func charWidth(ch rune) int {
    return rw.RuneWidth(ch)
}

var whiteSpaceRegex = regexp.MustCompile(`\s`)

// StringToAttribute converts text to a termui attribute. You may specifiy more
// then one attribute like that: "BLACK, BOLD, ...". All whitespaces
// are ignored.
func StringToAttribute(text string) Attribute {
    text = whiteSpaceRegex.ReplaceAllString(strings.ToLower(text), "")
    attributes := strings.Split(text, ",")
    result := Attribute(0)

    for _, theAttribute := range attributes {
        var match Attribute
        switch theAttribute {
        case "reset", "default":
            match = ColorDefault

        case "black":
            match = ColorBlack

        case "red":
            match = ColorRed

        case "green":
            match = ColorGreen

        case "yellow":
            match = ColorYellow

        case "blue":
            match = ColorBlue

        case "magenta":
            match = ColorMagenta

        case "cyan":
            match = ColorCyan

        case "white":
            match = ColorWhite

        case "bold":
            match = AttrBold

        case "underline":
            match = AttrUnderline

        case "reverse":
            match = AttrReverse
        }

        result |= match
    }

    return result
}

// TextCells returns a coloured text cells []Cell
func TextCells(s string, fg, bg Attribute) []Cell {
    cs := make([]Cell, 0, len(s))

    // sequence := MarkdownTextRendererFactory{}.TextRenderer(s).Render(fg, bg)
    // runes := []rune(sequence.NormalizedText)
    runes := str2runes(s)

    for n := range runes {
        // point, _ := sequence.PointAt(n, 0, 0)
        // cs = append(cs, Cell{point.Ch, point.Fg, point.Bg})
        cs = append(cs, Cell{runes[n], fg, bg})
    }
    return cs
}

// Width returns the actual screen space the cell takes (usually 1 or 2).
func (c Cell) Width() int {
    return charWidth(c.Ch)
}

// Copy return a copy of c
func (c Cell) Copy() Cell {
    return c
}

// TrimTxCells trims the overflowed text cells sequence.
func TrimTxCells(cs []Cell, w int) []Cell {
    if len(cs) <= w {
        return cs
    }
    return cs[:w]
}

// DTrimTxCls trims the overflowed text cells sequence and append dots at the end.
func DTrimTxCls(cs []Cell, w int) []Cell {
    l := len(cs)
    if l <= 0 {
        return []Cell{}
    }

    rt := make([]Cell, 0, w)
    csw := 0
    for i := 0; i < l && csw <= w; i++ {
        c := cs[i]
        cw := c.Width()

        if cw+csw < w {
            rt = append(rt, c)
            csw += cw
        } else {
            rt = append(rt, Cell{'…', c.Fg, c.Bg})
            break
        }
    }

    return rt
}
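The helper.go diff above adds StringToAttribute, which lower-cases its input, strips all whitespace, splits on commas, and ORs the matched colors and styles together. A small usage sketch, using only identifiers defined in that diff:

// Sketch only: calling StringToAttribute as defined in the helper.go diff above.
package main

import (
    "fmt"

    "github.com/gizak/termui"
)

func main() {
    // "RED, bold" is lower-cased and whitespace-stripped before matching,
    // so the result is exactly ColorRed | AttrBold.
    attr := termui.StringToAttribute("RED, bold")
    fmt.Println(attr == (termui.ColorRed | termui.AttrBold)) // true
}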
58 Godeps/_workspace/src/github.com/gizak/termui/helper_test.go (generated, vendored)
@@ -1,58 +0,0 @@
// Copyright 2015 Zack Guo <gizak@icloud.com>. All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.

package termui

import (
    "testing"

    "github.com/davecgh/go-spew/spew"
)

func TestStr2Rune(t *testing.T) {
    s := "你好,世界."
    rs := str2runes(s)
    if len(rs) != 6 {
        t.Error()
    }
}

func TestWidth(t *testing.T) {
    s0 := "つのだ☆HIRO"
    s1 := "11111111111"
    spew.Dump(s0)
    spew.Dump(s1)
    // above not align for setting East Asian Ambiguous to wide!!

    if strWidth(s0) != strWidth(s1) {
        t.Error("str len failed")
    }

    len1 := []rune{'a', '2', '&', '「', 'オ', '。'} //will false: 'ᆵ', 'ᄚ', 'ᄒ'
    for _, v := range len1 {
        if charWidth(v) != 1 {
            t.Error("len1 failed")
        }
    }

    len2 := []rune{'漢', '字', '한', '자', '你', '好', 'だ', '。', '%', 's', 'E', 'ョ', '、', 'ヲ'}
    for _, v := range len2 {
        if charWidth(v) != 2 {
            t.Error("len2 failed")
        }
    }
}

func TestTrim(t *testing.T) {
    s := "つのだ☆HIRO"
    if string(trimStr2Runes(s, 10)) != "つのだ☆HI"+dot {
        t.Error("trim failed")
    }
    if string(trimStr2Runes(s, 11)) != "つのだ☆HIRO" {
        t.Error("avoid tail trim failed")
    }
    if string(trimStr2Runes(s, 15)) != "つのだ☆HIRO" {
        t.Error("avoid trim failed")
    }
}