Compare commits: FTLDNS-sys...v5.0 (1176 commits)
@@ -9,7 +9,7 @@ end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = tab
tab_width = 2
tab_width = 4
charset = utf-8
trim_trailing_whitespace = true
.gitignore (vendored, 5 changes)
@@ -3,6 +3,11 @@
*.swp
__pycache__
.cache
.pytest_cache
.tox
.eggs
*.egg-info

# Created by https://www.gitignore.io/api/jetbrains+iml
.idea/codeStyles/Project.xml (generated, 6 changes)
@@ -1,11 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
  <code_scheme name="Project" version="173">
    <option name="OTHER_INDENT_OPTIONS">
      <value>
        <option name="INDENT_SIZE" value="2" />
        <option name="TAB_SIZE" value="2" />
      </value>
    </option>
    <MarkdownNavigatorCodeStyleSettings>
      <option name="RIGHT_MARGIN" value="72" />
    </MarkdownNavigatorCodeStyleSettings>
@@ -1,3 +1,6 @@
linters:
  shellcheck:
    shell: bash
  phpcs:
  csslint:
  flake8:
@@ -7,4 +7,6 @@ python:
install:
  - pip install -r requirements.txt

script: py.test -vv
script:
  # tox.ini handles setup, ordering of docker build first, and then run tests
  - tox
README.md: 126 changes
@@ -3,7 +3,7 @@
<b>Network-wide ad blocking via your own Linux hardware</b><br/>
</p>

The Pi-hole is a [DNS sinkhole](https://en.wikipedia.org/wiki/DNS_Sinkhole) that protects your devices from unwanted content, without installing any client-side software.
The Pi-hole[®](https://pi-hole.net/trademark-rules-and-brand-guidelines/) is a [DNS sinkhole](https://en.wikipedia.org/wiki/DNS_Sinkhole) that protects your devices from unwanted content, without installing any client-side software.

- **Easy-to-install**: our versatile installer walks you through the process, and [takes less than ten minutes](https://www.youtube.com/watch?v=vKWjx1AQYgs)
- **Resolute**: content is blocked in _non-browser locations_, such as ad-laden mobile apps and smart TVs
@@ -17,17 +17,17 @@ The Pi-hole is a [DNS sinkhole](https://en.wikipedia.org/wiki/DNS_Sinkhole) that
- **Free**: open source software which helps ensure _you_ are the sole person in control of your privacy

-----
<a href="https://www.codacy.com/app/Pi-hole/pi-hole?utm_source=github.com&utm_medium=referral&utm_content=pi-hole/pi-hole&utm_campaign=Badge_Grade"><img src="https://api.codacy.com/project/badge/Grade/c558a0f8d7124c99b02b84f0f5564238" alt="Codacy Grade"/></a>
<a href="https://travis-ci.org/pi-hole/pi-hole"><img src="https://travis-ci.org/pi-hole/pi-hole.svg?branch=development" alt="Travis Build Status"/></a>
<a href="https://www.bountysource.com/trackers/3011939-pi-hole-pi-hole?utm_source=3011939&utm_medium=shield&utm_campaign=TRACKER_BADGE"><img src="https://www.bountysource.com/badge/tracker?tracker_id=3011939" alt="BountySource"/></a>
[](https://www.codacy.com/app/Pi-hole/pi-hole?utm_source=github.com&utm_medium=referral&utm_content=pi-hole/pi-hole&utm_campaign=Badge_Grade)
[](https://travis-ci.org/pi-hole/pi-hole)
[](https://www.bountysource.com/trackers/3011939-pi-hole-pi-hole?utm_source=3011939&utm_medium=shield&utm_campaign=TRACKER_BADGE)

## One-Step Automated Install
Those who want to get started quickly and conveniently, may install Pi-hole using the following command:
Those who want to get started quickly and conveniently may install Pi-hole using the following command:

#### `curl -sSL https://install.pi-hole.net | bash`

## Alternative Install Methods
[Piping to `bash` is controversial](https://pi-hole.net/2016/07/25/curling-and-piping-to-bash), as it prevents you from [reading code that is about to run](https://github.com/pi-hole/pi-hole/blob/master/automated%20install/basic-install.sh) on your system. Therefore, we provide these alternative installation methods which allow code review before installation:
[Piping to `bash` is controversial](https://pi-hole.net/2016/07/25/curling-and-piping-to-bash), as it prevents you from [reading code that is about to run](https://github.com/pi-hole/pi-hole/blob/master/automated%20install/basic-install.sh) on your system. Therefore, we provide these alternative installation methods which allow code review before installation:

### Method 1: Clone our repository and run
```
@@ -46,30 +46,32 @@ sudo bash basic-install.sh

Once the installer has been run, you will need to [configure your router to have **DHCP clients use Pi-hole as their DNS server**](https://discourse.pi-hole.net/t/how-do-i-configure-my-devices-to-use-pi-hole-as-their-dns-server/245) which ensures that all devices connecting to your network will have content blocked without any further intervention.

If your router does not support setting the DNS server, you can [use Pi-hole's built in DHCP server](https://discourse.pi-hole.net/t/how-do-i-use-pi-holes-built-in-dhcp-server-and-why-would-i-want-to/3026); just be sure to disable DHCP on your router first (if it has that feature available).
If your router does not support setting the DNS server, you can [use Pi-hole's built-in DHCP server](https://discourse.pi-hole.net/t/how-do-i-use-pi-holes-built-in-dhcp-server-and-why-would-i-want-to/3026); just be sure to disable DHCP on your router first (if it has that feature available).

As a last resort, you can always manually set each device to use Pi-hole as their DNS server.

-----

## Pi-hole is free, but powered by your support
There are many reoccurring costs involved with maintaining free, open source, and privacy respecting software; expenses which [our volunteer developers](https://github.com/orgs/pi-hole/people) pitch in to cover out-of-pocket. This is just one example of how strongly we feel about our software, as well as the importance of keeping it maintained.
There are many reoccurring costs involved with maintaining free, open source, and privacy-respecting software; expenses which [our volunteer developers](https://github.com/orgs/pi-hole/people) pitch in to cover out-of-pocket. This is just one example of how strongly we feel about our software, as well as the importance of keeping it maintained.

Make no mistake: **your support is absolutely vital to help keep us innovating!**

### Donations
Sending a donation using our links below is **extremely helpful** in offsetting a portion of our monthly expenses:

<img src="https://pi-hole.github.io/graphics/Badges/paypal-badge-black.svg" width="24" height="24" alt="PP"/> <a href="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=3J2L3Z4DHW9UY">Donate via PayPal</a><br/>
<img src="https://pi-hole.github.io/graphics/Badges/bitcoin-badge-black.svg" width="24" height="24" alt="BTC"/> Bitcoin Address: <code>1GKnevUnVaQM2pQieMyeHkpr8DXfkpfAtL</code>
- <img src="https://pi-hole.github.io/graphics/Badges/paypal-badge-black.svg" width="24" height="24" alt="PP"/> <a href="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=3J2L3Z4DHW9UY">Donate via PayPal</a><br/>
- <img src="https://pi-hole.github.io/graphics/Badges/bitcoin-badge-black.svg" width="24" height="24" alt="BTC"/> [Bitcoin, Bitcoin Cash, Ethereum, Litecoin](https://commerce.coinbase.com/checkout/dd304d04-f324-4a77-931b-0db61c77a41b)

### Alternative support
If you'd rather not donate (_which is okay!_), there are other ways you can help support us:

- [Digital Ocean](http://www.digitalocean.com/?refcode=344d234950e1) affiliate link
- [Vultr](http://www.vultr.com/?ref=7190426) affiliate link
- [UNIXstickers.com](http://unixstickers.refr.cc/jacobs) affiliate link
- [Pi-hole Swag Store](https://pi-hole.net/shop/)
If you'd rather not [donate](https://pi-hole.net/donate/) (_which is okay!_), there are other ways you can help support us:
- [Patreon](https://patreon.com/pihole) _Become a patron for rewards_
- [Digital Ocean](http://www.digitalocean.com/?refcode=344d234950e1) _affiliate link_
- [Stickermule](https://www.stickermule.com/unlock?ref_id=9127301701&utm_medium=link&utm_source=invite) _earn a $10 credit after your first purchase_
- [Pi-hole Swag Store](https://pi-hole.net/shop/) _affiliate link_
- [Amazon](http://www.amazon.com/exec/obidos/redirect-home/pihole09-20) _affiliate link_
- [DNS Made Easy](https://cp.dnsmadeeasy.com/u/133706) _affiliate link_
- [Vultr](http://www.vultr.com/?ref=7190426) _affiliate link_
- Spreading the word about our software, and how you have benefited from it

### Contributing via GitHub
@@ -77,7 +79,7 @@ We welcome _everyone_ to contribute to issue reports, suggest new features, and

If you have something to add - anything from a typo through to a whole new feature, we're happy to check it out! Just make sure to fill out our template when submitting your request; the questions that it asks will help the volunteers quickly understand what you're aiming to achieve.

You'll find that the [install script](https://github.com/pi-hole/pi-hole/blob/master/automated%20install/basic-install.sh) and the [debug script](https://github.com/pi-hole/pi-hole/blob/master/advanced/Scripts/piholeDebug.sh) have an abundance of comments, which will help you better understand how Pi-hole works. They're also a valuable resource to those who want to learn how to write scripts or code a program! We encourage anyone who likes to tinker to read through it, and submit a pull request for us to review.
You'll find that the [install script](https://github.com/pi-hole/pi-hole/blob/master/automated%20install/basic-install.sh) and the [debug script](https://github.com/pi-hole/pi-hole/blob/master/advanced/Scripts/piholeDebug.sh) have an abundance of comments, which will help you better understand how Pi-hole works. They're also a valuable resource to those who want to learn how to write scripts or code a program! We encourage anyone who likes to tinker to read through it and submit a pull request for us to review.

### Presentations about Pi-hole
Word-of-mouth continues to help our project grow immensely, and so we are helping make this easier for people.
@@ -93,9 +95,6 @@ While we are primarily reachable on our <a href="https://discourse.pi-hole.net/"
<li><a href="https://discourse.pi-hole.net/c/faqs">Frequently Asked Questions</a></li>
<li><a href="https://github.com/pi-hole/pi-hole/wiki">Pi-hole Wiki</a></li>
<li><a href="https://discourse.pi-hole.net/c/feature-requests?order=votes">Feature Requests</a></li>
</ul>
<br/>
<ul>
<li><a href="https://discourse.pi-hole.net/">Discourse User Forum</a></li>
<li><a href="https://www.reddit.com/r/pihole/">Reddit</a></li>
<li><a href="https://gitter.im/pi-hole/pi-hole">Gitter</a> (Real-time chat)</li>
@@ -108,7 +107,7 @@ While we are primarily reachable on our <a href="https://discourse.pi-hole.net/"

## Breakdown of Features
### The Command Line Interface
The `pihole` command has all the functionality necessary to be able to fully administer the Pi-hole, without the need of the Web Interface. It's fast, user-friendly, and auditable by anyone with understanding of `bash`.
The `pihole` command has all the functionality necessary to be able to fully administer the Pi-hole, without the need of the Web Interface. It's fast, user-friendly, and auditable by anyone with an understanding of `bash`.

<a href="https://pi-hole.github.io/graphics/Screenshots/blacklist-cli.gif"><img src="https://pi-hole.github.io/graphics/Screenshots/blacklist-cli.gif" alt="Pi-hole Blacklist Demo"/></a>

@@ -127,7 +126,7 @@ You can read our [Core Feature Breakdown](https://github.com/pi-hole/pi-hole/wik
### The Web Interface Dashboard
This [optional dashboard](https://github.com/pi-hole/AdminLTE) allows you to view stats, change settings, and configure your Pi-hole. It's the power of the Command Line Interface, with none of the learning curve!

<a href="https://pi-hole.github.io/graphics/Screenshots/dashboard.png"><img src="https://pi-hole.github.io/graphics/Screenshots/dashboard.png" width="888" height="522" alt="Pi-hole Dashboard"/></a>
<img src="https://pi-hole.github.io/graphics/Screenshots/pihole-dashboard.png" alt="Pi-hole Dashboard"/></a>

Some notable features include:
* Mobile friendly interface
@@ -135,18 +134,18 @@ Some notable features include:
* Detailed graphs and doughnut charts
* Top lists of domains and clients
* A filterable and sortable query log
* Long Term Statistics to view data over user defined time ranges
* Long Term Statistics to view data over user-defined time ranges
* The ability to easily manage and configure Pi-hole features
* ... and all the main features of the Command Line Interface!

There are several ways to [access the dashboard](https://discourse.pi-hole.net/t/how-do-i-access-pi-holes-dashboard-admin-interface/3168):

1. `http://<IP_ADDPRESS_OF_YOUR_PI_HOLE>/admin/`
2. `http:/pi.hole/admin/` (when using Pi-hole as your DNS server)
2. `http://pi.hole/admin/` (when using Pi-hole as your DNS server)
3. `http://pi.hole/` (when using Pi-hole as your DNS server)

## The Faster-Than-Light Engine
The [FTL Engine](https://github.com/pi-hole/FTL) is a lightweight, purpose-built daemon used to provide statistics needed for the Web Interface, and its API can be easily integrated into your own projects. As the name implies, FTL does this all *very quickly*!
## Faster-than-light Engine
FTLDNS is a lightweight, purpose-built daemon used to provide statistics needed for the Web Interface, and its API can be easily integrated into your own projects. As the name implies, FTLDNS does this all *very quickly*!

Some of the statistics you can integrate include:
* Total number of domains being blocked
@@ -163,55 +162,52 @@ The API can be accessed via [`telnet`](https://github.com/pi-hole/FTL), the Web
-----

## The Origin Of Pi-hole
Pi-hole being a **advertising-aware DNS/Web server**, makes use of the following technologies:
Pi-hole being an **advertising-aware DNS/Web server**, makes use of the following technologies:

* [`dnsmasq`](http://www.thekelleys.org.uk/dnsmasq/doc.html) - a lightweight DNS and DHCP server
* [`curl`](https://curl.haxx.se) - A command line tool for transferring data with URL syntax
* [`lighttpd`](https://www.lighttpd.net) - webserver designed and optimized for high performance
* [`lighttpd`](https://www.lighttpd.net) - web server designed and optimized for high performance
* [`php`](https://secure.php.net) - a popular general-purpose web scripting language
* [AdminLTE Dashboard](https://github.com/almasaeed2010/AdminLTE) - premium admin control panel based on Bootstrap 3.x

While quite outdated at this point, [this original blog post about Pi-hole](https://jacobsalmela.com/2015/06/16/block-millions-ads-network-wide-with-a-raspberry-pi-hole-2-0/) goes into **great detail** about how Pi-hole was originally setup and how it works. Syntactically, it's no longer accurate, but the same basic principles and logic still apply to Pi-hole's current state.
While quite outdated at this point, [this original blog post about Pi-hole](https://jacobsalmela.com/2015/06/16/block-millions-ads-network-wide-with-a-raspberry-pi-hole-2-0/) goes into **great detail** about how Pi-hole was originally set up and how it works. Syntactically, it's no longer accurate, but the same basic principles and logic still apply to Pi-hole's current state.

-----

## Coverage
- [Lifehacker: Turn A Raspberry Pi Into An Ad Blocker With A Single Command](https://www.lifehacker.com.au/2015/02/turn-a-raspberry-pi-into-an-ad-blocker-with-a-single-command/) (February, 2015)
- [MakeUseOf: Adblock Everywhere: The Raspberry Pi-Hole Way](http://www.makeuseof.com/tag/adblock-everywhere-raspberry-pi-hole-way/) (March, 2015)
- [Catchpoint: Ad-Blocking on Apple iOS9: Valuing the End User Experience](http://blog.catchpoint.com/2015/09/14/ad-blocking-apple/) (September, 2015)
- [Security Now Netcast: Pi-hole](https://www.youtube.com/watch?v=p7-osq_y8i8&t=100m26s) (October, 2015)
- [TekThing: Raspberry Pi-Hole Makes Ads Disappear!](https://youtu.be/8Co59HU2gY0?t=2m) (December, 2015)
- [Foolish Tech Show](https://youtu.be/bYyena0I9yc?t=2m4s) (December, 2015)
- [Block Ads on All Home Devices for $53.18](https://medium.com/@robleathern/block-ads-on-all-home-devices-for-53-18-a5f1ec139693#.gj1xpgr5d) (December, 2015)
- [Pi-Hole for Ubuntu 14.04](http://www.boyter.org/2015/12/pi-hole-ubuntu-14-04/) (December, 2015)
- [MacObserver Podcast 585](https://www.macobserver.com/tmo/podcast/macgeekgab-585) (December, 2015)
- [The Defrag Show: Endoscope USB Camera, The Final [HoloLens] Vote, Adblock Pi and more](https://channel9.msdn.com/Shows/The-Defrag-Show/Defrag-Endoscope-USB-Camera-The-Final-HoloLens-Vote-Adblock-Pi-and-more?WT.mc_id=dlvr_twitter_ch9#time=20m39s) (January, 2016)
- [Adafruit: Pi-hole is a black hole for internet ads](https://blog.adafruit.com/2016/03/04/pi-hole-is-a-black-hole-for-internet-ads-piday-raspberrypi-raspberry_pi/) (March, 2016)
- [Digital Trends: 5 Fun, Easy Projects You Can Try With a $35 Raspberry Pi](https://youtu.be/QwrKlyC2kdM?t=1m42s) (March, 2016)
- [Adafruit: Raspberry Pi Quick Look at Pi Hole ad blocking server with Tony D](https://www.youtube.com/watch?v=eg4u2j1HYlI) (June, 2016)
- [Devacron: OrangePi Zero as an Ad-Block server with Pi-Hole](http://www.devacron.com/orangepi-zero-as-an-ad-block-server-with-pi-hole/) (December, 2016)
- [Linux Pro: The Hole Truth](http://www.linuxpromagazine.com/Issues/2017/200/The-sysadmin-s-daily-grind-Pi-hole) (July, 2017)
- [Adafruit: installing Pi-hole on a Pi Zero W](https://learn.adafruit.com/pi-hole-ad-blocker-with-pi-zero-w/install-pi-hole) (August, 2017)
- [CryptoAUSTRALIA: How We Tried 5 Privacy Focused Raspberry Pi Projects](https://blog.cryptoaustralia.org.au/2017/10/05/5-privacy-focused-raspberry-pi-projects/) (October, 2017)
- [CryptoAUSTRALIA: Pi-hole Workshop](https://blog.cryptoaustralia.org.au/2017/11/02/pi-hole-network-wide-ad-blocker/) (November, 2017)
- [Know How 355: Killing ads with a Raspberry Pi-Hole!](https://www.twit.tv/shows/know-how/episodes/355) (November, 2017)
- [Hobohouse: Block Advertising on your Network with Pi-hole and Raspberry Pi](https://hobo.house/2018/02/27/block-advertising-with-pi-hole-and-raspberry-pi/) (March, 2018)
- [Scott Helme: Securing DNS across all of my devices with Pi-Hole + DNS-over-HTTPS + 1.1.1.1](https://scotthelme.co.uk/securing-dns-across-all-of-my-devices-with-pihole-dns-over-https-1-1-1-1/) (April, 2018)
- [Scott Helme: Catching and dealing with naughty devices on my home network](https://scotthelme.co.uk/catching-naughty-devices-on-my-home-network/) (April, 2018)
- [Bloomberg Business Week: Brotherhood of the Ad blockers](https://www.bloomberg.com/news/features/2018-05-10/inside-the-brotherhood-of-pi-hole-ad-blockers) (May, 2018)
- [Software Engineering Daily: Interview with the creator of Pi-hole](https://softwareengineeringdaily.com/2018/05/29/pi-hole-ad-blocker-hardware-with-jacob-salmela/) (May, 2018)
- [Raspberry Pi: Block ads at home using Pi-hole and a Raspberry Pi](https://www.raspberrypi.org/blog/pi-hole-raspberry-pi/) (July, 2018)
- [Troy Hunt: Mmm... Pi-hole...](https://www.troyhunt.com/mmm-pi-hole/) (September, 2018)
- [PEBKAK Podcast: Interview With Jacob Salmela](https://www.jerseystudios.net/2018/10/11/150-pi-hole/) (October, 2018)

-----

## Pi-hole Projects
- [The Big Blocklist Collection](https://wally3k.github.io)
- [Docker Pi-hole container (x86 and ARM)](https://hub.docker.com/r/diginc/pi-hole/)
- [Pi-Hole in the cloud](http://blog.codybunch.com/2015/07/28/Pi-Hole-in-the-cloud/)
- [Pie in the Sky-Hole [A Pi-Hole in the cloud for ad-blocking via DNS]](https://dlaa.me/blog/post/skyhole)
- [Pi-hole Enable/Disable Button](http://thetimmy.silvernight.org/pages/endisbutton/)
- [Minibian Pi-hole](https://munkjensen.net/wiki/index.php/See_my_Pi-Hole#Minibian_Pi-hole)
- [CHiP-hole: Network-wide Ad-blocker](https://www.hackster.io/jacobsalmela/chip-hole-network-wide-ad-blocker-98e037)
- [Chrome Extension: Pi-Hole List Editor](https://chrome.google.com/webstore/detail/pi-hole-list-editor/hlnoeoejkllgkjbnnnhfolapllcnaglh) ([Source Code](https://github.com/packtloss/pihole-extension))
- [Splunk: Pi-hole Visualiser](https://splunkbase.splunk.com/app/3023/)
- [Adblocking with Pi-hole and Ubuntu 14.04 on VirtualBox](https://hbalagtas.blogspot.com.au/2016/02/adblocking-with-pi-hole-and-ubuntu-1404.html)
- [Pi-hole stats in your Mac's menu bar](https://getbitbar.com/plugins/Network/pi-hole.1m.py)
- [Pi-hole unRAID Template](https://forums.lime-technology.com/topic/36810-support-spants-nodered-mqtt-dashing-couchdb/)
- [Pie in the Sky-Hole](https://dlaa.me/blog/post/skyhole)
- [Copernicus: Windows Tray Application](https://github.com/goldbattle/copernicus)
- [Let your blink1 device blink when Pi-hole filters ads](https://gist.github.com/elpatron68/ec0b4c582e5abf604885ac1e068d233f)
- [Pi-hole metrics](https://github.com/nlamirault/pihole_exporter) exporter for [Prometheus](https://prometheus.io/)
- [Magic Mirror with DNS Filtering](https://zonksec.com/blog/magic-mirror-dns-filtering/#dnssoftware)
- [Pi-hole Droid: Android client](https://github.com/friimaind/pi-hole-droid)
- [Windows DNS Swapper](https://github.com/roots84/DNS-Swapper), see [#1400](https://github.com/pi-hole/pi-hole/issues/1400)
-----

## Coverage
- [Lifehacker: Turn A Raspberry Pi Into An Ad Blocker With A Single Command](https://www.lifehacker.com.au/2015/02/turn-a-raspberry-pi-into-an-ad-blocker-with-a-single-command/)
- [MakeUseOf: Adblock Everywhere: The Raspberry Pi-Hole Way](http://www.makeuseof.com/tag/adblock-everywhere-raspberry-pi-hole-way/)
- [Catchpoint: Ad-Blocking on Apple iOS9: Valuing the End User Experience](http://blog.catchpoint.com/2015/09/14/ad-blocking-apple/)
- [Security Now Netcast: Pi-hole](https://www.youtube.com/watch?v=p7-osq_y8i8&t=100m26s)
- [TekThing: Raspberry Pi-Hole Makes Ads Disappear!](https://youtu.be/8Co59HU2gY0?t=2m)
- [Foolish Tech Show](https://youtu.be/bYyena0I9yc?t=2m4s)
- [Block Ads on All Home Devices for $53.18](https://medium.com/@robleathern/block-ads-on-all-home-devices-for-53-18-a5f1ec139693#.gj1xpgr5d)
- [Pi-Hole for Ubuntu 14.04](http://www.boyter.org/2015/12/pi-hole-ubuntu-14-04/)
- [MacObserver Podcast 585](https://www.macobserver.com/tmo/podcast/macgeekgab-585)
- [The Defrag Show: Endoscope USB Camera, The Final [HoloLens] Vote, Adblock Pi and more](https://channel9.msdn.com/Shows/The-Defrag-Show/Defrag-Endoscope-USB-Camera-The-Final-HoloLens-Vote-Adblock-Pi-and-more?WT.mc_id=dlvr_twitter_ch9#time=20m39s)
- [Adafruit: Pi-hole is a black hole for internet ads](https://blog.adafruit.com/2016/03/04/pi-hole-is-a-black-hole-for-internet-ads-piday-raspberrypi-raspberry_pi/)
- [Digital Trends: 5 Fun, Easy Projects You Can Try With a $35 Raspberry Pi](https://youtu.be/QwrKlyC2kdM?t=1m42s)
- [Adafruit: Raspberry Pi Quick Look at Pi Hole ad blocking server with Tony D](https://www.youtube.com/watch?v=eg4u2j1HYlI)
- [Devacron: OrangePi Zero as an Ad-Block server with Pi-Hole](http://www.devacron.com/orangepi-zero-as-an-ad-block-server-with-pi-hole/)
- [Linux Pro: The Hole Truth](http://www.linuxpromagazine.com/Issues/2017/200/The-sysadmin-s-daily-grind-Pi-hole)
- [CryptoAUSTRALIA: How We Tried 5 Privacy Focused Raspberry Pi Projects](https://blog.cryptoaustralia.org.au/2017/10/05/5-privacy-focused-raspberry-pi-projects/)
- [CryptoAUSTRALIA: Pi-hole Workshop](https://blog.cryptoaustralia.org.au/2017/11/02/pi-hole-network-wide-ad-blocker/)
- [Know How 355: Killing ads with a Raspberry Pi-Hole!](https://www.twit.tv/shows/know-how/episodes/355)
- [Windows DNS Swapper](https://github.com/roots84/DNS-Swapper)
@@ -1,23 +0,0 @@
# The below list amalgamates several lists we used previously.
# See `https://github.com/StevenBlack/hosts` for details
##StevenBlack's list
https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts

##MalwareDomains
https://mirror1.malwaredomains.com/files/justdomains

##Cameleon
http://sysctl.org/cameleon/hosts

##Zeustracker
https://zeustracker.abuse.ch/blocklist.php?download=domainblocklist

##Disconnect.me Tracking
https://s3.amazonaws.com/lists.disconnect.me/simple_tracking.txt

##Disconnect.me Ads
https://s3.amazonaws.com/lists.disconnect.me/simple_ad.txt

##Hosts-file.net
https://hosts-file.net/ad_servers.txt
@@ -1,13 +1,11 @@
# Pi-hole: A black hole for Internet advertisements
# (c) 2015, 2016 by Jacob Salmela
# Network-wide ad blocking via your Raspberry Pi
# http://pi-hole.net
# dnsmasq config for Pi-hole
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Pi-hole is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# Dnsmasq config for Pi-hole's FTLDNS
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

###############################################################################
# FILE AUTOMATICALLY POPULATED BY PI-HOLE INSTALL/UPDATE PROCEDURE. #
@@ -16,13 +14,12 @@
# IF YOU WISH TO CHANGE THE UPSTREAM SERVERS, CHANGE THEM IN: #
# /etc/pihole/setupVars.conf #
# #
# ANY OTHER CHANGES SHOULD BE MADE IN A SEPERATE CONFIG FILE #
# OR IN /etc/dnsmasq.conf #
# ANY OTHER CHANGES SHOULD BE MADE IN A SEPARATE CONFIG FILE #
# WITHIN /etc/dnsmasq.d/yourname.conf #
###############################################################################

addn-hosts=/etc/pihole/gravity.list
addn-hosts=/etc/pihole/black.list
addn-hosts=/etc/pihole/local.list
addn-hosts=/etc/pihole/custom.list

domain-needed

@@ -39,7 +36,7 @@ interface=@INT@

cache-size=10000

log-queries=extra
log-queries
log-facility=/var/log/pihole.log

local-ttl=2
File diff suppressed because it is too large
advanced/Scripts/database_migration/gravity-db.sh (new file, 113 lines)
@@ -0,0 +1,113 @@
#!/usr/bin/env bash
# shellcheck disable=SC1090

# Pi-hole: A black hole for Internet advertisements
# (c) 2019 Pi-hole, LLC (https://pi-hole.net)
# Network-wide ad blocking via your own hardware.
#
# Updates gravity.db database
#
# This file is copyright under the latest version of the EUPL.
# Please see LICENSE file for your rights under this license.

readonly scriptPath="/etc/.pihole/advanced/Scripts/database_migration/gravity"

upgrade_gravityDB(){
    local database piholeDir auditFile version
    database="${1}"
    piholeDir="${2}"
    auditFile="${piholeDir}/auditlog.list"

    # Get database version
    version="$(sqlite3 "${database}" "SELECT \"value\" FROM \"info\" WHERE \"property\" = 'version';")"

    if [[ "$version" == "1" ]]; then
        # This migration script upgrades the gravity.db file by
        # adding the domain_audit table
        echo -e " ${INFO} Upgrading gravity database from version 1 to 2"
        sqlite3 "${database}" < "${scriptPath}/1_to_2.sql"
        version=2

        # Store audit domains in database table
        if [ -e "${auditFile}" ]; then
            echo -e " ${INFO} Migrating content of ${auditFile} into new database"
            # database_table_from_file is defined in gravity.sh
            database_table_from_file "domain_audit" "${auditFile}"
        fi
    fi
    if [[ "$version" == "2" ]]; then
        # This migration script upgrades the gravity.db file by
        # renaming the regex table to regex_blacklist, and
        # creating a new regex_whitelist table + corresponding linking table and views
        echo -e " ${INFO} Upgrading gravity database from version 2 to 3"
        sqlite3 "${database}" < "${scriptPath}/2_to_3.sql"
        version=3
    fi
    if [[ "$version" == "3" ]]; then
        # This migration script unifies the formally separated domain
        # lists into a single table with a UNIQUE domain constraint
        echo -e " ${INFO} Upgrading gravity database from version 3 to 4"
        sqlite3 "${database}" < "${scriptPath}/3_to_4.sql"
        version=4
    fi
    if [[ "$version" == "4" ]]; then
        # This migration script upgrades the gravity and list views
        # implementing necessary changes for per-client blocking
        echo -e " ${INFO} Upgrading gravity database from version 4 to 5"
        sqlite3 "${database}" < "${scriptPath}/4_to_5.sql"
        version=5
    fi
    if [[ "$version" == "5" ]]; then
        # This migration script upgrades the adlist view
        # to return an ID used in gravity.sh
        echo -e " ${INFO} Upgrading gravity database from version 5 to 6"
        sqlite3 "${database}" < "${scriptPath}/5_to_6.sql"
        version=6
    fi
    if [[ "$version" == "6" ]]; then
        # This migration script adds a special group with ID 0
        # which is automatically associated to all clients not
        # having their own group assignments
        echo -e " ${INFO} Upgrading gravity database from version 6 to 7"
        sqlite3 "${database}" < "${scriptPath}/6_to_7.sql"
        version=7
    fi
    if [[ "$version" == "7" ]]; then
        # This migration script recreated the group table
        # to ensure uniqueness on the group name
        # We also add date_added and date_modified columns
        echo -e " ${INFO} Upgrading gravity database from version 7 to 8"
        sqlite3 "${database}" < "${scriptPath}/7_to_8.sql"
        version=8
    fi
    if [[ "$version" == "8" ]]; then
        # This migration fixes some issues that were introduced
        # in the previous migration script.
        echo -e " ${INFO} Upgrading gravity database from version 8 to 9"
        sqlite3 "${database}" < "${scriptPath}/8_to_9.sql"
        version=9
    fi
    if [[ "$version" == "9" ]]; then
        # This migration drops unused tables and creates triggers to remove
        # obsolete groups assignments when the linked items are deleted
        echo -e " ${INFO} Upgrading gravity database from version 9 to 10"
        sqlite3 "${database}" < "${scriptPath}/9_to_10.sql"
        version=10
    fi
    if [[ "$version" == "10" ]]; then
        # This adds timestamp and an optional comment field to the client table
        # These fields are only temporary and will be replaces by the columns
        # defined in gravity.db.sql during gravity swapping. We add them here
        # to keep the copying process generic (needs the same columns in both the
        # source and the destination databases).
        echo -e " ${INFO} Upgrading gravity database from version 10 to 11"
        sqlite3 "${database}" < "${scriptPath}/10_to_11.sql"
        version=11
    fi
    if [[ "$version" == "11" ]]; then
        # Rename group 0 from "Unassociated" to "Default"
        echo -e " ${INFO} Upgrading gravity database from version 11 to 12"
        sqlite3 "${database}" < "${scriptPath}/11_to_12.sql"
        version=12
    fi
}
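For orientation, here is a minimal sketch of how this migration runner could be exercised on its own. It is not part of the diff: in Pi-hole the function is normally driven by gravity.sh, which supplies ${INFO} and database_table_from_file, so both are stubbed here, and the database path is only the usual default rather than anything the script requires.

```
# Hypothetical standalone run of the migration chain above.
INFO="[i]"                        # stub for the status glyph normally set elsewhere
database_table_from_file() {      # stub for the helper defined in gravity.sh
    echo "  would import ${2} into table ${1}"
}

source /etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh
upgrade_gravityDB "/etc/pihole/gravity.db" "/etc/pihole"

# The info table should now report the newest schema version handled above (12).
sqlite3 /etc/pihole/gravity.db "SELECT value FROM info WHERE property = 'version';"
```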
advanced/Scripts/database_migration/gravity/10_to_11.sql (new file, 16 lines)
@@ -0,0 +1,16 @@
.timeout 30000

BEGIN TRANSACTION;

ALTER TABLE client ADD COLUMN date_added INTEGER;
ALTER TABLE client ADD COLUMN date_modified INTEGER;
ALTER TABLE client ADD COLUMN comment TEXT;

CREATE TRIGGER tr_client_update AFTER UPDATE ON client
    BEGIN
      UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
    END;

UPDATE info SET value = 11 WHERE property = 'version';

COMMIT;
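To see what this migration leaves behind, a small hedged check can be run against a scratch copy of the database; the table and column names come from the SQL above, while the paths and the row id are assumptions of this sketch.

```
# Work on a copy so the live database is untouched.
cp /etc/pihole/gravity.db /tmp/gravity-test.db
sqlite3 /tmp/gravity-test.db "
  UPDATE client SET comment = 'example comment' WHERE id = 1;  -- fires tr_client_update (assumes a client row with id 1 exists)
  SELECT id, date_modified, comment FROM client WHERE id = 1;  -- date_modified is refreshed by the trigger
"
```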
advanced/Scripts/database_migration/gravity/11_to_12.sql (new file, 19 lines)
@@ -0,0 +1,19 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

UPDATE "group" SET name = 'Default' WHERE id = 0;
UPDATE "group" SET description = 'The default group' WHERE id = 0;

DROP TRIGGER IF EXISTS tr_group_zero;

CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
    BEGIN
      INSERT OR IGNORE INTO "group" (id,enabled,name,description) VALUES (0,1,'Default','The default group');
    END;

UPDATE info SET value = 12 WHERE property = 'version';

COMMIT;
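The tr_group_zero trigger defined above makes the default group effectively undeletable. A hedged illustration, again against a scratch copy (the paths are assumptions, not part of the migration):

```
cp /etc/pihole/gravity.db /tmp/gravity-test.db
sqlite3 /tmp/gravity-test.db "
  DELETE FROM \"group\" WHERE id = 0;                                -- trigger re-inserts the default row
  SELECT id, enabled, name, description FROM \"group\" WHERE id = 0;
"
```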
advanced/Scripts/database_migration/gravity/1_to_2.sql (new file, 14 lines)
@@ -0,0 +1,14 @@
.timeout 30000

BEGIN TRANSACTION;

CREATE TABLE domain_audit
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    domain TEXT UNIQUE NOT NULL,
    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int))
);

UPDATE info SET value = 2 WHERE property = 'version';

COMMIT;
advanced/Scripts/database_migration/gravity/2_to_3.sql (new file, 65 lines)
@@ -0,0 +1,65 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

ALTER TABLE regex RENAME TO regex_blacklist;

CREATE TABLE regex_blacklist_by_group
(
    regex_blacklist_id INTEGER NOT NULL REFERENCES regex_blacklist (id),
    group_id INTEGER NOT NULL REFERENCES "group" (id),
    PRIMARY KEY (regex_blacklist_id, group_id)
);

INSERT INTO regex_blacklist_by_group SELECT * FROM regex_by_group;
DROP TABLE regex_by_group;
DROP VIEW vw_regex;
DROP TRIGGER tr_regex_update;

CREATE VIEW vw_regex_blacklist AS SELECT DISTINCT domain
    FROM regex_blacklist
    LEFT JOIN regex_blacklist_by_group ON regex_blacklist_by_group.regex_blacklist_id = regex_blacklist.id
    LEFT JOIN "group" ON "group".id = regex_blacklist_by_group.group_id
    WHERE regex_blacklist.enabled = 1 AND (regex_blacklist_by_group.group_id IS NULL OR "group".enabled = 1)
    ORDER BY regex_blacklist.id;

CREATE TRIGGER tr_regex_blacklist_update AFTER UPDATE ON regex_blacklist
    BEGIN
      UPDATE regex_blacklist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
    END;

CREATE TABLE regex_whitelist
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    domain TEXT UNIQUE NOT NULL,
    enabled BOOLEAN NOT NULL DEFAULT 1,
    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
    date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
    comment TEXT
);

CREATE TABLE regex_whitelist_by_group
(
    regex_whitelist_id INTEGER NOT NULL REFERENCES regex_whitelist (id),
    group_id INTEGER NOT NULL REFERENCES "group" (id),
    PRIMARY KEY (regex_whitelist_id, group_id)
);

CREATE VIEW vw_regex_whitelist AS SELECT DISTINCT domain
    FROM regex_whitelist
    LEFT JOIN regex_whitelist_by_group ON regex_whitelist_by_group.regex_whitelist_id = regex_whitelist.id
    LEFT JOIN "group" ON "group".id = regex_whitelist_by_group.group_id
    WHERE regex_whitelist.enabled = 1 AND (regex_whitelist_by_group.group_id IS NULL OR "group".enabled = 1)
    ORDER BY regex_whitelist.id;

CREATE TRIGGER tr_regex_whitelist_update AFTER UPDATE ON regex_whitelist
    BEGIN
      UPDATE regex_whitelist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
    END;


UPDATE info SET value = 3 WHERE property = 'version';

COMMIT;
96  advanced/Scripts/database_migration/gravity/3_to_4.sql  Normal file
@@ -0,0 +1,96 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

CREATE TABLE domainlist
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    type INTEGER NOT NULL DEFAULT 0,
    domain TEXT UNIQUE NOT NULL,
    enabled BOOLEAN NOT NULL DEFAULT 1,
    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
    date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
    comment TEXT
);

ALTER TABLE whitelist ADD COLUMN type INTEGER;
UPDATE whitelist SET type = 0;
INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
    SELECT type,domain,enabled,date_added,date_modified,comment FROM whitelist;

ALTER TABLE blacklist ADD COLUMN type INTEGER;
UPDATE blacklist SET type = 1;
INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
    SELECT type,domain,enabled,date_added,date_modified,comment FROM blacklist;

ALTER TABLE regex_whitelist ADD COLUMN type INTEGER;
UPDATE regex_whitelist SET type = 2;
INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
    SELECT type,domain,enabled,date_added,date_modified,comment FROM regex_whitelist;

ALTER TABLE regex_blacklist ADD COLUMN type INTEGER;
UPDATE regex_blacklist SET type = 3;
INSERT INTO domainlist (type,domain,enabled,date_added,date_modified,comment)
    SELECT type,domain,enabled,date_added,date_modified,comment FROM regex_blacklist;

DROP TABLE whitelist_by_group;
DROP TABLE blacklist_by_group;
DROP TABLE regex_whitelist_by_group;
DROP TABLE regex_blacklist_by_group;
CREATE TABLE domainlist_by_group
(
    domainlist_id INTEGER NOT NULL REFERENCES domainlist (id),
    group_id INTEGER NOT NULL REFERENCES "group" (id),
    PRIMARY KEY (domainlist_id, group_id)
);

DROP TRIGGER tr_whitelist_update;
DROP TRIGGER tr_blacklist_update;
DROP TRIGGER tr_regex_whitelist_update;
DROP TRIGGER tr_regex_blacklist_update;
CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist
    BEGIN
      UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
    END;

DROP VIEW vw_whitelist;
CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
    FROM domainlist
    LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
    LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
    WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
    AND domainlist.type = 0
    ORDER BY domainlist.id;

DROP VIEW vw_blacklist;
CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
    FROM domainlist
    LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
    LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
    WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
    AND domainlist.type = 1
    ORDER BY domainlist.id;

DROP VIEW vw_regex_whitelist;
CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
    FROM domainlist
    LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
    LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
    WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
    AND domainlist.type = 2
    ORDER BY domainlist.id;

DROP VIEW vw_regex_blacklist;
CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
    FROM domainlist
    LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
    LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
    WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
    AND domainlist.type = 3
    ORDER BY domainlist.id;

UPDATE info SET value = 4 WHERE property = 'version';

COMMIT;
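The migration above folds the four separate lists into a single domainlist table keyed by a type code (0 = exact whitelist, 1 = exact blacklist, 2 = regex whitelist, 3 = regex blacklist). A couple of illustrative queries, assuming the default database path:

    sqlite3 /etc/pihole/gravity.db "SELECT domain FROM domainlist WHERE type = 1 AND enabled = 1;"   # enabled exact blacklist entries
    sqlite3 /etc/pihole/gravity.db "SELECT type, COUNT(*) FROM domainlist GROUP BY type;"            # entry count per list type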
38  advanced/Scripts/database_migration/gravity/4_to_5.sql  Normal file
@@ -0,0 +1,38 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

DROP TABLE gravity;
CREATE TABLE gravity
(
    domain TEXT NOT NULL,
    adlist_id INTEGER NOT NULL REFERENCES adlist (id),
    PRIMARY KEY(domain, adlist_id)
);

DROP VIEW vw_gravity;
CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id
    FROM gravity
    LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id
    LEFT JOIN adlist ON adlist.id = gravity.adlist_id
    LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
    WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1);

CREATE TABLE client
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ip TEXT NOT NULL UNIQUE
);

CREATE TABLE client_by_group
(
    client_id INTEGER NOT NULL REFERENCES client (id),
    group_id INTEGER NOT NULL REFERENCES "group" (id),
    PRIMARY KEY (client_id, group_id)
);

UPDATE info SET value = 5 WHERE property = 'version';

COMMIT;
18  advanced/Scripts/database_migration/gravity/5_to_6.sql  Normal file
@@ -0,0 +1,18 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

DROP VIEW vw_adlist;
CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id
    FROM adlist
    LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id
    LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
    WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1)
    ORDER BY adlist.id;

UPDATE info SET value = 6 WHERE property = 'version';

COMMIT;
35  advanced/Scripts/database_migration/gravity/6_to_7.sql  Normal file
@@ -0,0 +1,35 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');

INSERT INTO domainlist_by_group (domainlist_id, group_id) SELECT id, 0 FROM domainlist;
INSERT INTO client_by_group (client_id, group_id) SELECT id, 0 FROM client;
INSERT INTO adlist_by_group (adlist_id, group_id) SELECT id, 0 FROM adlist;

CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
    BEGIN
      INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
    END;

CREATE TRIGGER tr_client_add AFTER INSERT ON client
    BEGIN
      INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
    END;

CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
    BEGIN
      INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
    END;

CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
    BEGIN
      INSERT OR REPLACE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
    END;

UPDATE info SET value = 7 WHERE property = 'version';

COMMIT;
35  advanced/Scripts/database_migration/gravity/7_to_8.sql  Normal file
@@ -0,0 +1,35 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

ALTER TABLE "group" RENAME TO "group__";

CREATE TABLE "group"
(
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    enabled BOOLEAN NOT NULL DEFAULT 1,
    name TEXT UNIQUE NOT NULL,
    date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
    date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
    description TEXT
);

CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
    BEGIN
      UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
    END;

INSERT OR IGNORE INTO "group" (id,enabled,name,description) SELECT id,enabled,name,description FROM "group__";

DROP TABLE "group__";

CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
    BEGIN
      INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
    END;

UPDATE info SET value = 8 WHERE property = 'version';

COMMIT;
27  advanced/Scripts/database_migration/gravity/8_to_9.sql  Normal file
@@ -0,0 +1,27 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

DROP TRIGGER IF EXISTS tr_group_update;
DROP TRIGGER IF EXISTS tr_group_zero;

PRAGMA legacy_alter_table=ON;
ALTER TABLE "group" RENAME TO "group__";
PRAGMA legacy_alter_table=OFF;
ALTER TABLE "group__" RENAME TO "group";

CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
    BEGIN
      UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
    END;

CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
    BEGIN
      INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Unassociated');
    END;

UPDATE info SET value = 9 WHERE property = 'version';

COMMIT;
29  advanced/Scripts/database_migration/gravity/9_to_10.sql  Normal file
@@ -0,0 +1,29 @@
.timeout 30000

PRAGMA FOREIGN_KEYS=OFF;

BEGIN TRANSACTION;

DROP TABLE IF EXISTS whitelist;
DROP TABLE IF EXISTS blacklist;
DROP TABLE IF EXISTS regex_whitelist;
DROP TABLE IF EXISTS regex_blacklist;

CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist
    BEGIN
      DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id;
    END;

CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist
    BEGIN
      DELETE FROM adlist_by_group WHERE adlist_id = OLD.id;
    END;

CREATE TRIGGER tr_client_delete AFTER DELETE ON client
    BEGIN
      DELETE FROM client_by_group WHERE client_id = OLD.id;
    END;

UPDATE info SET value = 10 WHERE property = 'version';

COMMIT;
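The delete triggers above keep the *_by_group tables free of orphaned assignments. A small check, assuming the default database path and a hypothetical example.com entry:

    sqlite3 /etc/pihole/gravity.db "
        DELETE FROM domainlist WHERE domain = 'example.com';
        SELECT COUNT(*) FROM domainlist_by_group
            WHERE domainlist_id NOT IN (SELECT id FROM domainlist);"
    # Expected output: 0 -- tr_domainlist_delete removed the matching group assignments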
@@ -11,256 +11,271 @@
|
||||
# Globals
|
||||
basename=pihole
|
||||
piholeDir=/etc/"${basename}"
|
||||
whitelist="${piholeDir}"/whitelist.txt
|
||||
blacklist="${piholeDir}"/blacklist.txt
|
||||
readonly wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf"
|
||||
gravityDBfile="${piholeDir}/gravity.db"
|
||||
|
||||
reload=false
|
||||
addmode=true
|
||||
verbose=true
|
||||
wildcard=false
|
||||
web=false
|
||||
|
||||
domList=()
|
||||
|
||||
listMain=""
|
||||
listAlt=""
|
||||
typeId=""
|
||||
comment=""
|
||||
declare -i domaincount
|
||||
domaincount=0
|
||||
|
||||
colfile="/opt/pihole/COL_TABLE"
|
||||
source ${colfile}
|
||||
|
||||
# IDs are hard-wired to domain interpretation in the gravity database scheme
|
||||
# Clients (including FTL) will read them through the corresponding views
|
||||
readonly whitelist="0"
|
||||
readonly blacklist="1"
|
||||
readonly regex_whitelist="2"
|
||||
readonly regex_blacklist="3"
|
||||
|
||||
GetListnameFromTypeId() {
|
||||
if [[ "$1" == "${whitelist}" ]]; then
|
||||
echo "whitelist"
|
||||
elif [[ "$1" == "${blacklist}" ]]; then
|
||||
echo "blacklist"
|
||||
elif [[ "$1" == "${regex_whitelist}" ]]; then
|
||||
echo "regex whitelist"
|
||||
elif [[ "$1" == "${regex_blacklist}" ]]; then
|
||||
echo "regex blacklist"
|
||||
fi
|
||||
}
|
||||
|
||||
GetListParamFromTypeId() {
|
||||
if [[ "${typeId}" == "${whitelist}" ]]; then
|
||||
echo "w"
|
||||
elif [[ "${typeId}" == "${blacklist}" ]]; then
|
||||
echo "b"
|
||||
elif [[ "${typeId}" == "${regex_whitelist}" && "${wildcard}" == true ]]; then
|
||||
echo "-white-wild"
|
||||
elif [[ "${typeId}" == "${regex_whitelist}" ]]; then
|
||||
echo "-white-regex"
|
||||
elif [[ "${typeId}" == "${regex_blacklist}" && "${wildcard}" == true ]]; then
|
||||
echo "-wild"
|
||||
elif [[ "${typeId}" == "${regex_blacklist}" ]]; then
|
||||
echo "-regex"
|
||||
fi
|
||||
}
|
||||
|
||||
helpFunc() {
|
||||
if [[ "${listMain}" == "${whitelist}" ]]; then
|
||||
param="w"
|
||||
type="white"
|
||||
elif [[ "${listMain}" == "${wildcardlist}" ]]; then
|
||||
param="wild"
|
||||
type="wildcard black"
|
||||
else
|
||||
param="b"
|
||||
type="black"
|
||||
fi
|
||||
local listname param
|
||||
|
||||
listname="$(GetListnameFromTypeId "${typeId}")"
|
||||
param="$(GetListParamFromTypeId)"
|
||||
|
||||
echo "Usage: pihole -${param} [options] <domain> <domain2 ...>
|
||||
Example: 'pihole -${param} site.com', or 'pihole -${param} site1.com site2.com'
|
||||
${type^}list one or more domains
|
||||
${listname^} one or more domains
|
||||
|
||||
Options:
|
||||
-d, --delmode Remove domain(s) from the ${type}list
|
||||
-nr, --noreload Update ${type}list without refreshing dnsmasq
|
||||
-d, --delmode Remove domain(s) from the ${listname}
|
||||
-nr, --noreload Update ${listname} without reloading the DNS server
|
||||
-q, --quiet Make output less verbose
|
||||
-h, --help Show this help dialog
|
||||
-l, --list Display all your ${type}listed domains
|
||||
-l, --list Display all your ${listname}listed domains
|
||||
--nuke Removes all entries in a list"
|
||||
|
||||
exit 0
|
||||
}
|
||||
|
||||
EscapeRegexp() {
|
||||
# This way we may safely insert an arbitrary
|
||||
# string in our regular expressions
|
||||
# Also remove leading "." if present
|
||||
echo $* | sed 's/^\.*//' | sed "s/[]\.|$(){}?+*^]/\\\\&/g" | sed "s/\\//\\\\\//g"
|
||||
}
|
||||
ValidateDomain() {
|
||||
# Convert to lowercase
|
||||
domain="${1,,}"
|
||||
|
||||
HandleOther() {
|
||||
# Convert to lowercase
|
||||
domain="${1,,}"
|
||||
|
||||
# Check validity of domain
|
||||
if [[ "${#domain}" -le 253 ]]; then
|
||||
validDomain=$(grep -P "^((-|_)*[a-z\d]((-|_)*[a-z\d])*(-|_)*)(\.(-|_)*([a-z\d]((-|_)*[a-z\d])*))*$" <<< "${domain}") # Valid chars check
|
||||
validDomain=$(grep -P "^[^\.]{1,63}(\.[^\.]{1,63})*$" <<< "${validDomain}") # Length of each label
|
||||
fi
|
||||
|
||||
if [[ -n "${validDomain}" ]]; then
|
||||
domList=("${domList[@]}" ${validDomain})
|
||||
else
|
||||
echo -e " ${CROSS} ${domain} is not a valid argument or domain name!"
|
||||
fi
|
||||
}
|
||||
|
||||
PoplistFile() {
|
||||
# Check whitelist file exists, and if not, create it
|
||||
if [[ ! -f "${whitelist}" ]]; then
|
||||
touch "${whitelist}"
|
||||
fi
|
||||
|
||||
# Check blacklist file exists, and if not, create it
|
||||
if [[ ! -f "${blacklist}" ]]; then
|
||||
touch "${blacklist}"
|
||||
fi
|
||||
|
||||
for dom in "${domList[@]}"; do
|
||||
# Logic: If addmode then add to desired list and remove from the other; if delmode then remove from desired list but do not add to the other
|
||||
if ${addmode}; then
|
||||
AddDomain "${dom}" "${listMain}"
|
||||
RemoveDomain "${dom}" "${listAlt}"
|
||||
if [[ "${listMain}" == "${whitelist}" || "${listMain}" == "${blacklist}" ]]; then
|
||||
RemoveDomain "${dom}" "${wildcardlist}"
|
||||
fi
|
||||
else
|
||||
RemoveDomain "${dom}" "${listMain}"
|
||||
# Check validity of domain (don't check for regex entries)
|
||||
if [[ "${#domain}" -le 253 ]]; then
|
||||
if [[ ( "${typeId}" == "${regex_blacklist}" || "${typeId}" == "${regex_whitelist}" ) && "${wildcard}" == false ]]; then
|
||||
validDomain="${domain}"
|
||||
else
|
||||
validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check
|
||||
validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${validDomain}" ]]; then
|
||||
domList=("${domList[@]}" "${validDomain}")
|
||||
else
|
||||
echo -e " ${CROSS} ${domain} is not a valid argument or domain name!"
|
||||
fi
|
||||
|
||||
domaincount=$((domaincount+1))
|
||||
}
|
||||
|
||||
ProcessDomainList() {
|
||||
for dom in "${domList[@]}"; do
|
||||
# Format domain into regex filter if requested
|
||||
if [[ "${wildcard}" == true ]]; then
|
||||
dom="(^|\\.)${dom//\./\\.}$"
|
||||
fi
|
||||
|
||||
# Logic: If addmode then add to desired list and remove from the other;
|
||||
# if delmode then remove from desired list but do not add to the other
|
||||
if ${addmode}; then
|
||||
AddDomain "${dom}"
|
||||
else
|
||||
RemoveDomain "${dom}"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
AddDomain() {
|
||||
list="$2"
|
||||
domain=$(EscapeRegexp "$1")
|
||||
local domain num requestedListname existingTypeId existingListname
|
||||
domain="$1"
|
||||
|
||||
[[ "${list}" == "${whitelist}" ]] && listname="whitelist"
|
||||
[[ "${list}" == "${blacklist}" ]] && listname="blacklist"
|
||||
[[ "${list}" == "${wildcardlist}" ]] && listname="wildcard blacklist"
|
||||
|
||||
if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then
|
||||
[[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only"
|
||||
[[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only"
|
||||
bool=true
|
||||
# Is the domain in the list we want to add it to?
|
||||
grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false
|
||||
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}';")"
|
||||
requestedListname="$(GetListnameFromTypeId "${typeId}")"
|
||||
|
||||
if [[ "${bool}" == false ]]; then
|
||||
# Domain not found in the whitelist file, add it!
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} Adding $1 to $listname..."
|
||||
fi
|
||||
reload=true
|
||||
# Add it to the list we want to add it to
|
||||
echo "$1" >> "${list}"
|
||||
else
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} ${1} already exists in ${listname}, no need to add!"
|
||||
if [[ "${num}" -ne 0 ]]; then
|
||||
existingTypeId="$(sqlite3 "${gravityDBfile}" "SELECT type FROM domainlist WHERE domain = '${domain}';")"
|
||||
if [[ "${existingTypeId}" == "${typeId}" ]]; then
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} ${1} already exists in ${requestedListname}, no need to add!"
|
||||
fi
|
||||
else
|
||||
existingListname="$(GetListnameFromTypeId "${existingTypeId}")"
|
||||
sqlite3 "${gravityDBfile}" "UPDATE domainlist SET type = ${typeId} WHERE domain='${domain}';"
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} ${1} already exists in ${existingListname}, it has been moved to ${requestedListname}!"
|
||||
fi
|
||||
fi
|
||||
return
|
||||
fi
|
||||
elif [[ "${list}" == "${wildcardlist}" ]]; then
|
||||
source "${piholeDir}/setupVars.conf"
|
||||
# Remove the /* from the end of the IP addresses
|
||||
IPV4_ADDRESS=${IPV4_ADDRESS%/*}
|
||||
IPV6_ADDRESS=${IPV6_ADDRESS%/*}
|
||||
[[ -z "${type}" ]] && type="--wildcard-only"
|
||||
bool=true
|
||||
# Is the domain in the list?
|
||||
grep -e "address=\/${domain}\/" "${wildcardlist}" > /dev/null 2>&1 || bool=false
|
||||
|
||||
if [[ "${bool}" == false ]]; then
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} Adding $1 to wildcard blacklist..."
|
||||
fi
|
||||
reload="restart"
|
||||
echo "address=/$1/${IPV4_ADDRESS}" >> "${wildcardlist}"
|
||||
if [[ "${#IPV6_ADDRESS}" > 0 ]]; then
|
||||
echo "address=/$1/${IPV6_ADDRESS}" >> "${wildcardlist}"
|
||||
fi
|
||||
else
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} ${1} already exists in wildcard blacklist, no need to add!"
|
||||
fi
|
||||
# Domain not found in the table, add it!
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} Adding ${domain} to the ${requestedListname}..."
|
||||
fi
|
||||
reload=true
|
||||
# Insert only the domain here. The enabled and date_added fields will be filled
|
||||
# with their default values (enabled = true, date_added = current timestamp)
|
||||
if [[ -z "${comment}" ]]; then
|
||||
sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type) VALUES ('${domain}',${typeId});"
|
||||
else
|
||||
# also add comment when variable has been set through the "--comment" option
|
||||
sqlite3 "${gravityDBfile}" "INSERT INTO domainlist (domain,type,comment) VALUES ('${domain}',${typeId},'${comment}');"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
RemoveDomain() {
|
||||
list="$2"
|
||||
domain=$(EscapeRegexp "$1")
|
||||
local domain num requestedListname
|
||||
domain="$1"
|
||||
|
||||
[[ "${list}" == "${whitelist}" ]] && listname="whitelist"
|
||||
[[ "${list}" == "${blacklist}" ]] && listname="blacklist"
|
||||
[[ "${list}" == "${wildcardlist}" ]] && listname="wildcard blacklist"
|
||||
# Is the domain in the list we want to remove it from?
|
||||
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};")"
|
||||
|
||||
if [[ "${list}" == "${whitelist}" || "${list}" == "${blacklist}" ]]; then
|
||||
bool=true
|
||||
[[ "${list}" == "${whitelist}" && -z "${type}" ]] && type="--whitelist-only"
|
||||
[[ "${list}" == "${blacklist}" && -z "${type}" ]] && type="--blacklist-only"
|
||||
# Is it in the list? Logic follows that if its whitelisted it should not be blacklisted and vice versa
|
||||
grep -Ex -q "${domain}" "${list}" > /dev/null 2>&1 || bool=false
|
||||
if [[ "${bool}" == true ]]; then
|
||||
# Remove it from the other one
|
||||
echo -e " ${INFO} Removing $1 from $listname..."
|
||||
# /I flag: search case-insensitive
|
||||
sed -i "/${domain}/Id" "${list}"
|
||||
reload=true
|
||||
else
|
||||
requestedListname="$(GetListnameFromTypeId "${typeId}")"
|
||||
|
||||
if [[ "${num}" -eq 0 ]]; then
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} ${1} does not exist in ${listname}, no need to remove!"
|
||||
echo -e " ${INFO} ${domain} does not exist in ${requestedListname}, no need to remove!"
|
||||
fi
|
||||
return
|
||||
fi
|
||||
elif [[ "${list}" == "${wildcardlist}" ]]; then
|
||||
[[ -z "${type}" ]] && type="--wildcard-only"
|
||||
bool=true
|
||||
# Is it in the list?
|
||||
grep -e "address=\/${domain}\/" "${wildcardlist}" > /dev/null 2>&1 || bool=false
|
||||
if [[ "${bool}" == true ]]; then
|
||||
# Remove it from the other one
|
||||
echo -e " ${INFO} Removing $1 from $listname..."
|
||||
# /I flag: search case-insensitive
|
||||
sed -i "/address=\/${domain}/Id" "${list}"
|
||||
reload=true
|
||||
else
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} ${1} does not exist in ${listname}, no need to remove!"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# Update Gravity
|
||||
Reload() {
|
||||
echo ""
|
||||
pihole -g --skip-download "${type:-}"
|
||||
# Domain found in the table, remove it!
|
||||
if [[ "${verbose}" == true ]]; then
|
||||
echo -e " ${INFO} Removing ${domain} from the ${requestedListname}..."
|
||||
fi
|
||||
reload=true
|
||||
# Remove it from the current list
|
||||
sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE domain = '${domain}' AND type = ${typeId};"
|
||||
}
|
||||
|
||||
Displaylist() {
|
||||
if [[ -f ${listMain} ]]; then
|
||||
if [[ "${listMain}" == "${whitelist}" ]]; then
|
||||
string="gravity resistant domains"
|
||||
local count num_pipes domain enabled status nicedate requestedListname
|
||||
|
||||
requestedListname="$(GetListnameFromTypeId "${typeId}")"
|
||||
data="$(sqlite3 "${gravityDBfile}" "SELECT domain,enabled,date_modified FROM domainlist WHERE type = ${typeId};" 2> /dev/null)"
|
||||
|
||||
if [[ -z $data ]]; then
|
||||
echo -e "Not showing empty list"
|
||||
else
|
||||
string="domains caught in the sinkhole"
|
||||
echo -e "Displaying ${requestedListname}:"
|
||||
count=1
|
||||
while IFS= read -r line
|
||||
do
|
||||
# Count number of pipes seen in this line
|
||||
# This is necessary because we can only detect the pipe separating the fields
|
||||
# from the end backwards as the domain (which is the first field) may contain
|
||||
# pipe symbols as they are perfectly valid regex filter control characters
|
||||
num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")"
|
||||
|
||||
# Extract domain and enabled status based on the obtained number of pipe characters
|
||||
domain="$(cut -d'|' -f"-$((num_pipes-1))" <<< "${line}")"
|
||||
enabled="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")"
|
||||
datemod="$(cut -d'|' -f"$((num_pipes+1))" <<< "${line}")"
|
||||
|
||||
# Translate boolean status into human readable string
|
||||
if [[ "${enabled}" -eq 1 ]]; then
|
||||
status="enabled"
|
||||
else
|
||||
status="disabled"
|
||||
fi
|
||||
|
||||
# Get nice representation of numerical date stored in database
|
||||
nicedate=$(date --rfc-2822 -d "@${datemod}")
|
||||
|
||||
echo " ${count}: ${domain} (${status}, last modified ${nicedate})"
|
||||
count=$((count+1))
|
||||
done <<< "${data}"
|
||||
fi
|
||||
verbose=false
|
||||
echo -e "Displaying $string:\n"
|
||||
count=1
|
||||
while IFS= read -r RD; do
|
||||
echo " ${count}: ${RD}"
|
||||
count=$((count+1))
|
||||
done < "${listMain}"
|
||||
else
|
||||
echo -e " ${COL_LIGHT_RED}${listMain} does not exist!${COL_NC}"
|
||||
fi
|
||||
exit 0;
|
||||
exit 0;
|
||||
}
|
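A worked example of the pipe-counting field split used in Displaylist above, for a made-up row whose regex domain itself contains "|" characters:

    line='(^|\.)example\.com$|1|1611234567'
    num_pipes="$(grep -c "^" <<< "$(grep -o "|" <<< "${line}")")"    # 3
    domain="$(cut -d'|' -f"-$((num_pipes-1))" <<< "${line}")"        # (^|\.)example\.com$
    enabled="$(cut -d'|' -f"$((num_pipes))" <<< "${line}")"          # 1
    datemod="$(cut -d'|' -f"$((num_pipes+1))" <<< "${line}")"        # 1611234567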
||||
|
||||
NukeList() {
|
||||
if [[ -f "${listMain}" ]]; then
|
||||
# Back up original list
|
||||
cp "${listMain}" "${listMain}.bck~"
|
||||
# Empty out file
|
||||
echo "" > "${listMain}"
|
||||
fi
|
||||
sqlite3 "${gravityDBfile}" "DELETE FROM domainlist WHERE type = ${typeId};"
|
||||
}
|
||||
|
||||
for var in "$@"; do
|
||||
case "${var}" in
|
||||
"-w" | "whitelist" ) listMain="${whitelist}"; listAlt="${blacklist}";;
|
||||
"-b" | "blacklist" ) listMain="${blacklist}"; listAlt="${whitelist}";;
|
||||
"-wild" | "wildcard" ) listMain="${wildcardlist}";;
|
||||
"-nr"| "--noreload" ) reload=false;;
|
||||
"-d" | "--delmode" ) addmode=false;;
|
||||
"-q" | "--quiet" ) verbose=false;;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
"-l" | "--list" ) Displaylist;;
|
||||
"--nuke" ) NukeList;;
|
||||
* ) HandleOther "${var}";;
|
||||
esac
|
||||
GetComment() {
|
||||
comment="$1"
|
||||
if [[ "${comment}" =~ [^a-zA-Z0-9_\#:/\.,\ -] ]]; then
|
||||
echo " ${CROSS} Found invalid characters in domain comment!"
|
||||
exit
|
||||
fi
|
||||
}
|
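GetComment above only accepts letters, digits and the characters _ # : / . , space and hyphen. Two hypothetical calls to illustrate:

    GetComment "Managed by IT, see ticket #1234"   # accepted
    GetComment "Ads & trackers"                    # rejected ('&' is not allowed); the script prints an error and exits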
||||
|
||||
while (( "$#" )); do
|
||||
case "${1}" in
|
||||
"-w" | "whitelist" ) typeId=0;;
|
||||
"-b" | "blacklist" ) typeId=1;;
|
||||
"--white-regex" | "white-regex" ) typeId=2;;
|
||||
"--white-wild" | "white-wild" ) typeId=2; wildcard=true;;
|
||||
"--wild" | "wildcard" ) typeId=3; wildcard=true;;
|
||||
"--regex" | "regex" ) typeId=3;;
|
||||
"-nr"| "--noreload" ) reload=false;;
|
||||
"-d" | "--delmode" ) addmode=false;;
|
||||
"-q" | "--quiet" ) verbose=false;;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
"-l" | "--list" ) Displaylist;;
|
||||
"--nuke" ) NukeList;;
|
||||
"--web" ) web=true;;
|
||||
"--comment" ) GetComment "${2}"; shift;;
|
||||
* ) ValidateDomain "${1}";;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
shift
|
||||
|
||||
if [[ $# = 0 ]]; then
|
||||
helpFunc
|
||||
if [[ ${domaincount} == 0 ]]; then
|
||||
helpFunc
|
||||
fi
|
||||
|
||||
PoplistFile
|
||||
ProcessDomainList
|
||||
|
||||
# Used on web interface
|
||||
if $web; then
|
||||
echo "DONE"
|
||||
fi
|
||||
|
||||
if [[ "${reload}" != false ]]; then
|
||||
# Ensure that "restart" is used for Wildcard updates
|
||||
Reload "${reload}"
|
||||
pihole restartdns reload-lists
|
||||
fi
|
||||
|
66  advanced/Scripts/piholeARPTable.sh  Executable file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC1090
|
||||
|
||||
# Pi-hole: A black hole for Internet advertisements
|
||||
# (c) 2019 Pi-hole, LLC (https://pi-hole.net)
|
||||
# Network-wide ad blocking via your own hardware.
|
||||
#
|
||||
# ARP table interaction
|
||||
#
|
||||
# This file is copyright under the latest version of the EUPL.
|
||||
# Please see LICENSE file for your rights under this license.
|
||||
|
||||
coltable="/opt/pihole/COL_TABLE"
|
||||
if [[ -f ${coltable} ]]; then
|
||||
source ${coltable}
|
||||
fi
|
||||
|
||||
# Determine database location
|
||||
# Obtain DBFILE=... setting from pihole-FTL.db
|
||||
# Constructed to return nothing when
|
||||
# a) the setting is not present in the config file, or
|
||||
# b) the setting is commented out (e.g. "#DBFILE=...")
|
||||
FTLconf="/etc/pihole/pihole-FTL.conf"
|
||||
if [ -e "$FTLconf" ]; then
|
||||
DBFILE="$(sed -n -e 's/^\s*DBFILE\s*=\s*//p' ${FTLconf})"
|
||||
fi
|
||||
# Test for empty string. Use standard path in this case.
|
||||
if [ -z "$DBFILE" ]; then
|
||||
DBFILE="/etc/pihole/pihole-FTL.db"
|
||||
fi
|
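The sed expression above only yields a value for an active DBFILE= setting; commented-out lines fall through to the default path. For example:

    printf 'DBFILE=/custom/pihole-FTL.db\n'  | sed -n -e 's/^\s*DBFILE\s*=\s*//p'   # -> /custom/pihole-FTL.db
    printf '#DBFILE=/custom/pihole-FTL.db\n' | sed -n -e 's/^\s*DBFILE\s*=\s*//p'   # -> (no output)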
||||
|
||||
|
||||
flushARP(){
|
||||
local output
|
||||
if [[ "${args[1]}" != "quiet" ]]; then
|
||||
echo -ne " ${INFO} Flushing network table ..."
|
||||
fi
|
||||
|
||||
# Truncate network_addresses table in pihole-FTL.db
|
||||
# This needs to be done before we can truncate the network table due to
|
||||
# foreign key constraints
|
||||
if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network_addresses" 2>&1); then
|
||||
echo -e "${OVER} ${CROSS} Failed to truncate network_addresses table"
|
||||
echo " Database location: ${DBFILE}"
|
||||
echo " Output: ${output}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Truncate network table in pihole-FTL.db
|
||||
if ! output=$(sqlite3 "${DBFILE}" "DELETE FROM network" 2>&1); then
|
||||
echo -e "${OVER} ${CROSS} Failed to truncate network table"
|
||||
echo " Database location: ${DBFILE}"
|
||||
echo " Output: ${output}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ "${args[1]}" != "quiet" ]]; then
|
||||
echo -e "${OVER} ${TICK} Flushed network table"
|
||||
fi
|
||||
}
|
||||
|
||||
args=("$@")
|
||||
|
||||
case "${args[0]}" in
|
||||
"arpflush" ) flushARP;;
|
||||
esac
|
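Hypothetical invocations of the script above (the installed path is an assumption):

    bash /opt/pihole/piholeARPTable.sh arpflush          # flush the network tables with progress output
    bash /opt/pihole/piholeARPTable.sh arpflush quiet    # flush silently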
@@ -17,254 +17,185 @@ source "${PI_HOLE_FILES_DIR}/automated install/basic-install.sh"
|
||||
# piholeGitURL set in basic-install.sh
|
||||
# is_repo() sourced from basic-install.sh
|
||||
# setupVars set in basic-install.sh
|
||||
# check_download_exists sourced from basic-install.sh
|
||||
# fully_fetch_repo sourced from basic-install.sh
|
||||
# get_available_branches sourced from basic-install.sh
|
||||
# fetch_checkout_pull_branch sourced from basic-install.sh
|
||||
# checkout_pull_branch sourced from basic-install.sh
|
||||
|
||||
source "${setupVars}"
|
||||
|
||||
coltable="/opt/pihole/COL_TABLE"
|
||||
source ${coltable}
|
||||
|
||||
check_download_exists() {
|
||||
status=$(curl --head --silent "https://ftl.pi-hole.net/${1}" | head -n 1)
|
||||
if grep -q "404" <<< "$status"; then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
fully_fetch_repo() {
|
||||
# Add upstream branches to shallow clone
|
||||
local directory="${1}"
|
||||
|
||||
cd "${directory}" || return 1
|
||||
if is_repo "${directory}"; then
|
||||
git remote set-branches origin '*' || return 1
|
||||
git fetch --quiet || return 1
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_available_branches() {
|
||||
# Return available branches
|
||||
local directory
|
||||
directory="${1}"
|
||||
local output
|
||||
|
||||
cd "${directory}" || return 1
|
||||
# Get reachable remote branches, but store STDERR as STDOUT variable
|
||||
output=$( { git remote show origin | grep 'tracked' | sed 's/tracked//;s/ //g'; } 2>&1 )
|
||||
echo "$output"
|
||||
return
|
||||
}
|
||||
|
||||
fetch_checkout_pull_branch() {
|
||||
# Check out specified branch
|
||||
local directory
|
||||
directory="${1}"
|
||||
local branch
|
||||
branch="${2}"
|
||||
|
||||
# Set the reference for the requested branch, fetch, check it out and pull it
|
||||
cd "${directory}" || return 1
|
||||
git remote set-branches origin "${branch}" || return 1
|
||||
git stash --all --quiet &> /dev/null || true
|
||||
git clean --quiet --force -d || true
|
||||
git fetch --quiet || return 1
|
||||
checkout_pull_branch "${directory}" "${branch}" || return 1
|
||||
}
|
||||
|
||||
checkout_pull_branch() {
|
||||
# Check out specified branch
|
||||
local directory
|
||||
directory="${1}"
|
||||
local branch
|
||||
branch="${2}"
|
||||
local oldbranch
|
||||
|
||||
cd "${directory}" || return 1
|
||||
|
||||
oldbranch="$(git symbolic-ref HEAD)"
|
||||
|
||||
str="Switching to branch: '${branch}' from '${oldbranch}'"
|
||||
echo -ne " ${INFO} $str"
|
||||
git checkout "${branch}" --quiet || return 1
|
||||
echo -e "${OVER} ${TICK} $str"
|
||||
|
||||
git_pull=$(git pull || return 1)
|
||||
|
||||
if [[ "$git_pull" == *"up-to-date"* ]]; then
|
||||
echo -e " ${INFO} ${git_pull}"
|
||||
else
|
||||
echo -e "$git_pull\\n"
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
warning1() {
|
||||
echo " Please note that changing branches severely alters your Pi-hole subsystems"
|
||||
echo " Features that work on the master branch, may not on a development branch"
|
||||
echo -e " ${COL_LIGHT_RED}This feature is NOT supported unless a Pi-hole developer explicitly asks!${COL_NC}"
|
||||
read -r -p " Have you read and understood this? [y/N] " response
|
||||
case "${response}" in
|
||||
[yY][eE][sS]|[yY])
|
||||
echo ""
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
echo -e "\\n ${INFO} Branch change has been cancelled"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
echo " Please note that changing branches severely alters your Pi-hole subsystems"
|
||||
echo " Features that work on the master branch, may not on a development branch"
|
||||
echo -e " ${COL_LIGHT_RED}This feature is NOT supported unless a Pi-hole developer explicitly asks!${COL_NC}"
|
||||
read -r -p " Have you read and understood this? [y/N] " response
|
||||
case "${response}" in
|
||||
[yY][eE][sS]|[yY])
|
||||
echo ""
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
echo -e "\\n ${INFO} Branch change has been cancelled"
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
checkout() {
|
||||
local corebranches
|
||||
local webbranches
|
||||
local corebranches
|
||||
local webbranches
|
||||
|
||||
# Avoid globbing
|
||||
set -f
|
||||
# Check if FTL is installed - do this early on as FTL is a hard dependency for Pi-hole
|
||||
local funcOutput
|
||||
funcOutput=$(get_binary_name) #Store output of get_binary_name here
|
||||
local binary
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL)
|
||||
|
||||
# This is unlikely
|
||||
if ! is_repo "${PI_HOLE_FILES_DIR}" ; then
|
||||
echo -e " ${COL_LIGHT_RED}Error: Core Pi-hole repo is missing from system!
|
||||
Please re-run install script from https://github.com/pi-hole/pi-hole${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
if [[ "${INSTALL_WEB}" == "true" ]]; then
|
||||
if ! is_repo "${webInterfaceDir}" ; then
|
||||
echo -e " ${COL_LIGHT_RED}Error: Web Admin repo is missing from system!
|
||||
Please re-run install script from https://github.com/pi-hole/pi-hole${COL_NC}"
|
||||
exit 1;
|
||||
# Avoid globbing
|
||||
set -f
|
||||
|
||||
# This is unlikely
|
||||
if ! is_repo "${PI_HOLE_FILES_DIR}" ; then
|
||||
echo -e " ${COL_LIGHT_RED}Error: Core Pi-hole repo is missing from system!"
|
||||
echo -e " Please re-run install script from https://github.com/pi-hole/pi-hole${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "${1}" ]]; then
|
||||
echo -e " ${COL_LIGHT_RED}Invalid option${COL_NC}
|
||||
Try 'pihole checkout --help' for more information."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! warning1 ; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${1}" == "dev" ]] ; then
|
||||
# Shortcut to check out development branches
|
||||
echo -e " ${INFO} Shortcut \"dev\" detected - checking out development / devel branches..."
|
||||
echo ""
|
||||
echo -e " ${INFO} Pi-hole Core"
|
||||
fetch_checkout_pull_branch "${PI_HOLE_FILES_DIR}" "development" || { echo " ${CROSS} Unable to pull Core development branch"; exit 1; }
|
||||
if [[ "${INSTALL_WEB}" == "true" ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} Web interface"
|
||||
fetch_checkout_pull_branch "${webInterfaceDir}" "devel" || { echo " ${CROSS} Unable to pull Web development branch"; exit 1; }
|
||||
fi
|
||||
#echo -e " ${TICK} Pi-hole Core"
|
||||
|
||||
get_binary_name
|
||||
local path
|
||||
path="development/${binary}"
|
||||
echo "development" > /etc/pihole/ftlbranch
|
||||
elif [[ "${1}" == "master" ]] ; then
|
||||
# Shortcut to check out master branches
|
||||
echo -e " ${INFO} Shortcut \"master\" detected - checking out master branches..."
|
||||
echo -e " ${INFO} Pi-hole core"
|
||||
fetch_checkout_pull_branch "${PI_HOLE_FILES_DIR}" "master" || { echo " ${CROSS} Unable to pull Core master branch"; exit 1; }
|
||||
if [[ ${INSTALL_WEB} == "true" ]]; then
|
||||
echo -e " ${INFO} Web interface"
|
||||
fetch_checkout_pull_branch "${webInterfaceDir}" "master" || { echo " ${CROSS} Unable to pull Web master branch"; exit 1; }
|
||||
fi
|
||||
#echo -e " ${TICK} Web Interface"
|
||||
get_binary_name
|
||||
local path
|
||||
path="master/${binary}"
|
||||
echo "master" > /etc/pihole/ftlbranch
|
||||
elif [[ "${1}" == "core" ]] ; then
|
||||
str="Fetching branches from ${piholeGitUrl}"
|
||||
echo -ne " ${INFO} $str"
|
||||
if ! fully_fetch_repo "${PI_HOLE_FILES_DIR}" ; then
|
||||
echo -e "${OVER} ${CROSS} $str"
|
||||
exit 1
|
||||
fi
|
||||
corebranches=($(get_available_branches "${PI_HOLE_FILES_DIR}"))
|
||||
|
||||
if [[ "${corebranches[*]}" == *"master"* ]]; then
|
||||
echo -e "${OVER} ${TICK} $str
|
||||
${INFO} ${#corebranches[@]} branches available for Pi-hole Core"
|
||||
else
|
||||
# Print STDERR output from get_available_branches
|
||||
echo -e "${OVER} ${CROSS} $str\\n\\n${corebranches[*]}"
|
||||
exit 1
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == "true" ]]; then
|
||||
if ! is_repo "${webInterfaceDir}" ; then
|
||||
echo -e " ${COL_LIGHT_RED}Error: Web Admin repo is missing from system!"
|
||||
echo -e " Please re-run install script from https://github.com/pi-hole/pi-hole${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
# Have the user choose the branch they want
|
||||
if ! (for e in "${corebranches[@]}"; do [[ "$e" == "${2}" ]] && exit 0; done); then
|
||||
echo -e " ${INFO} Requested branch \"${2}\" is not available"
|
||||
echo -e " ${INFO} Available branches for Core are:"
|
||||
for e in "${corebranches[@]}"; do echo " - $e"; done
|
||||
exit 1
|
||||
fi
|
||||
checkout_pull_branch "${PI_HOLE_FILES_DIR}" "${2}"
|
||||
elif [[ "${1}" == "web" ]] && [[ "${INSTALL_WEB}" == "true" ]] ; then
|
||||
str="Fetching branches from ${webInterfaceGitUrl}"
|
||||
echo -ne " ${INFO} $str"
|
||||
if ! fully_fetch_repo "${webInterfaceDir}" ; then
|
||||
echo -e "${OVER} ${CROSS} $str"
|
||||
exit 1
|
||||
fi
|
||||
webbranches=($(get_available_branches "${webInterfaceDir}"))
|
||||
|
||||
if [[ "${webbranches[*]}" == *"master"* ]]; then
|
||||
echo -e "${OVER} ${TICK} $str
|
||||
${INFO} ${#webbranches[@]} branches available for Web Admin"
|
||||
else
|
||||
# Print STDERR output from get_available_branches
|
||||
echo -e "${OVER} ${CROSS} $str\\n\\n${webbranches[*]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
# Have the user choose the branch they want
|
||||
if ! (for e in "${webbranches[@]}"; do [[ "$e" == "${2}" ]] && exit 0; done); then
|
||||
echo -e " ${INFO} Requested branch \"${2}\" is not available"
|
||||
echo -e " ${INFO} Available branches for Web Admin are:"
|
||||
for e in "${webbranches[@]}"; do echo " - $e"; done
|
||||
exit 1
|
||||
fi
|
||||
checkout_pull_branch "${webInterfaceDir}" "${2}"
|
||||
elif [[ "${1}" == "ftl" ]] ; then
|
||||
get_binary_name
|
||||
local path
|
||||
path="${2}/${binary}"
|
||||
|
||||
if check_download_exists "$path"; then
|
||||
echo " ${TICK} Branch ${2} exists"
|
||||
echo "${2}" > /etc/pihole/ftlbranch
|
||||
else
|
||||
echo " ${CROSS} Requested branch \"${2}\" is not available"
|
||||
ftlbranches=( $(git ls-remote https://github.com/pi-hole/ftl | grep 'heads' | sed 's/refs\/heads\///;s/ //g' | awk '{print $2}') )
|
||||
echo -e " ${INFO} Available branches for FTL are:"
|
||||
for e in "${ftlbranches[@]}"; do echo " - $e"; done
|
||||
if [[ -z "${1}" ]]; then
|
||||
echo -e " ${COL_LIGHT_RED}Invalid option${COL_NC}"
|
||||
echo -e " Try 'pihole checkout --help' for more information."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
else
|
||||
echo -e " ${INFO} Requested option \"${1}\" is not available"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Force updating everything
|
||||
if [[ ! "${1}" == "web" ]]; then
|
||||
echo -e " ${INFO} Running installer to upgrade your installation"
|
||||
if "${PI_HOLE_FILES_DIR}/automated install/basic-install.sh" --unattended; then
|
||||
exit 0
|
||||
else
|
||||
echo -e " ${COL_LIGHT_RED} Error: Unable to complete update, please contact support${COL_NC}"
|
||||
exit 1
|
||||
if ! warning1 ; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${1}" == "dev" ]] ; then
|
||||
# Shortcut to check out development branches
|
||||
echo -e " ${INFO} Shortcut \"dev\" detected - checking out development / devel branches..."
|
||||
echo ""
|
||||
echo -e " ${INFO} Pi-hole Core"
|
||||
fetch_checkout_pull_branch "${PI_HOLE_FILES_DIR}" "development" || { echo " ${CROSS} Unable to pull Core development branch"; exit 1; }
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == "true" ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} Web interface"
|
||||
fetch_checkout_pull_branch "${webInterfaceDir}" "devel" || { echo " ${CROSS} Unable to pull Web development branch"; exit 1; }
|
||||
fi
|
||||
#echo -e " ${TICK} Pi-hole Core"
|
||||
|
||||
local path
|
||||
path="development/${binary}"
|
||||
echo "development" > /etc/pihole/ftlbranch
|
||||
chmod 644 /etc/pihole/ftlbranch
|
||||
elif [[ "${1}" == "master" ]] ; then
|
||||
# Shortcut to check out master branches
|
||||
echo -e " ${INFO} Shortcut \"master\" detected - checking out master branches..."
|
||||
echo -e " ${INFO} Pi-hole core"
|
||||
fetch_checkout_pull_branch "${PI_HOLE_FILES_DIR}" "master" || { echo " ${CROSS} Unable to pull Core master branch"; exit 1; }
|
||||
if [[ ${INSTALL_WEB_INTERFACE} == "true" ]]; then
|
||||
echo -e " ${INFO} Web interface"
|
||||
fetch_checkout_pull_branch "${webInterfaceDir}" "master" || { echo " ${CROSS} Unable to pull Web master branch"; exit 1; }
|
||||
fi
|
||||
#echo -e " ${TICK} Web Interface"
|
||||
local path
|
||||
path="master/${binary}"
|
||||
echo "master" > /etc/pihole/ftlbranch
|
||||
chmod 644 /etc/pihole/ftlbranch
|
||||
elif [[ "${1}" == "core" ]] ; then
|
||||
str="Fetching branches from ${piholeGitUrl}"
|
||||
echo -ne " ${INFO} $str"
|
||||
if ! fully_fetch_repo "${PI_HOLE_FILES_DIR}" ; then
|
||||
echo -e "${OVER} ${CROSS} $str"
|
||||
exit 1
|
||||
fi
|
||||
corebranches=($(get_available_branches "${PI_HOLE_FILES_DIR}"))
|
||||
|
||||
if [[ "${corebranches[*]}" == *"master"* ]]; then
|
||||
echo -e "${OVER} ${TICK} $str"
|
||||
echo -e " ${INFO} ${#corebranches[@]} branches available for Pi-hole Core"
|
||||
else
|
||||
# Print STDERR output from get_available_branches
|
||||
echo -e "${OVER} ${CROSS} $str\\n\\n${corebranches[*]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
# Have the user choose the branch they want
|
||||
if ! (for e in "${corebranches[@]}"; do [[ "$e" == "${2}" ]] && exit 0; done); then
|
||||
echo -e " ${INFO} Requested branch \"${2}\" is not available"
|
||||
echo -e " ${INFO} Available branches for Core are:"
|
||||
for e in "${corebranches[@]}"; do echo " - $e"; done
|
||||
exit 1
|
||||
fi
|
||||
checkout_pull_branch "${PI_HOLE_FILES_DIR}" "${2}"
|
||||
elif [[ "${1}" == "web" ]] && [[ "${INSTALL_WEB_INTERFACE}" == "true" ]] ; then
|
||||
str="Fetching branches from ${webInterfaceGitUrl}"
|
||||
echo -ne " ${INFO} $str"
|
||||
if ! fully_fetch_repo "${webInterfaceDir}" ; then
|
||||
echo -e "${OVER} ${CROSS} $str"
|
||||
exit 1
|
||||
fi
|
||||
webbranches=($(get_available_branches "${webInterfaceDir}"))
|
||||
|
||||
if [[ "${webbranches[*]}" == *"master"* ]]; then
|
||||
echo -e "${OVER} ${TICK} $str"
|
||||
echo -e " ${INFO} ${#webbranches[@]} branches available for Web Admin"
|
||||
else
|
||||
# Print STDERR output from get_available_branches
|
||||
echo -e "${OVER} ${CROSS} $str\\n\\n${webbranches[*]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
# Have the user choose the branch they want
|
||||
if ! (for e in "${webbranches[@]}"; do [[ "$e" == "${2}" ]] && exit 0; done); then
|
||||
echo -e " ${INFO} Requested branch \"${2}\" is not available"
|
||||
echo -e " ${INFO} Available branches for Web Admin are:"
|
||||
for e in "${webbranches[@]}"; do echo " - $e"; done
|
||||
exit 1
|
||||
fi
|
||||
checkout_pull_branch "${webInterfaceDir}" "${2}"
|
||||
elif [[ "${1}" == "ftl" ]] ; then
|
||||
local path
|
||||
path="${2}/${binary}"
|
||||
|
||||
if check_download_exists "$path"; then
|
||||
echo " ${TICK} Branch ${2} exists"
|
||||
echo "${2}" > /etc/pihole/ftlbranch
|
||||
chmod 644 /etc/pihole/ftlbranch
|
||||
FTLinstall "${binary}"
|
||||
restart_service pihole-FTL
|
||||
enable_service pihole-FTL
|
||||
else
|
||||
echo " ${CROSS} Requested branch \"${2}\" is not available"
|
||||
ftlbranches=( $(git ls-remote https://github.com/pi-hole/ftl | grep 'heads' | sed 's/refs\/heads\///;s/ //g' | awk '{print $2}') )
|
||||
echo -e " ${INFO} Available branches for FTL are:"
|
||||
for e in "${ftlbranches[@]}"; do echo " - $e"; done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
else
|
||||
echo -e " ${INFO} Requested option \"${1}\" is not available"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Force updating everything
|
||||
if [[ ! "${1}" == "web" && ! "${1}" == "ftl" ]]; then
|
||||
echo -e " ${INFO} Running installer to upgrade your installation"
|
||||
if "${PI_HOLE_FILES_DIR}/automated install/basic-install.sh" --unattended; then
|
||||
exit 0
|
||||
else
|
||||
echo -e " ${COL_LIGHT_RED} Error: Unable to complete update, please contact support${COL_NC}"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
File diff suppressed because it is too large
@@ -18,49 +18,53 @@ source ${colfile}
|
||||
# b) the setting is commented out (e.g. "#DBFILE=...")
|
||||
FTLconf="/etc/pihole/pihole-FTL.conf"
|
||||
if [ -e "$FTLconf" ]; then
|
||||
DBFILE="$(sed -n -e 's/^\s*DBFILE\s*=\s*//p' ${FTLconf})"
|
||||
DBFILE="$(sed -n -e 's/^\s*DBFILE\s*=\s*//p' ${FTLconf})"
|
||||
fi
|
||||
# Test for empty string. Use standard path in this case.
|
||||
if [ -z "$DBFILE" ]; then
|
||||
DBFILE="/etc/pihole/pihole-FTL.db"
|
||||
DBFILE="/etc/pihole/pihole-FTL.db"
|
||||
fi
|
||||
|
||||
if [[ "$@" != *"quiet"* ]]; then
|
||||
echo -ne " ${INFO} Flushing /var/log/pihole.log ..."
|
||||
echo -ne " ${INFO} Flushing /var/log/pihole.log ..."
|
||||
fi
|
||||
if [[ "$@" == *"once"* ]]; then
|
||||
# Nightly logrotation
|
||||
if command -v /usr/sbin/logrotate >/dev/null; then
|
||||
# Logrotate once
|
||||
/usr/sbin/logrotate --force /etc/pihole/logrotate
|
||||
else
|
||||
# Copy pihole.log over to pihole.log.1
|
||||
# and empty out pihole.log
|
||||
# Note that moving the file is not an option, as
|
||||
# dnsmasq would happily continue writing into the
|
||||
# moved file (it will have the same file handler)
|
||||
cp /var/log/pihole.log /var/log/pihole.log.1
|
||||
echo " " > /var/log/pihole.log
|
||||
fi
|
||||
else
|
||||
# Manual flushing
|
||||
if command -v /usr/sbin/logrotate >/dev/null; then
|
||||
# Logrotate twice to move all data out of sight of FTL
|
||||
/usr/sbin/logrotate --force /etc/pihole/logrotate; sleep 3
|
||||
/usr/sbin/logrotate --force /etc/pihole/logrotate
|
||||
else
|
||||
# Flush both pihole.log and pihole.log.1 (if existing)
|
||||
echo " " > /var/log/pihole.log
|
||||
if [ -f /var/log/pihole.log.1 ]; then
|
||||
echo " " > /var/log/pihole.log.1
|
||||
# Nightly logrotation
|
||||
if command -v /usr/sbin/logrotate >/dev/null; then
|
||||
# Logrotate once
|
||||
/usr/sbin/logrotate --force /etc/pihole/logrotate
|
||||
else
|
||||
# Copy pihole.log over to pihole.log.1
|
||||
# and empty out pihole.log
|
||||
# Note that moving the file is not an option, as
|
||||
# dnsmasq would happily continue writing into the
|
||||
# moved file (it will have the same file handler)
|
||||
cp -p /var/log/pihole.log /var/log/pihole.log.1
|
||||
echo " " > /var/log/pihole.log
|
||||
chmod 644 /var/log/pihole.log
|
||||
fi
|
||||
fi
|
||||
# Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history)
|
||||
deleted=$(sqlite3 "${DBFILE}" "DELETE FROM queries WHERE timestamp >= strftime('%s','now')-86400; select changes() from queries limit 1")
|
||||
else
|
||||
# Manual flushing
|
||||
if command -v /usr/sbin/logrotate >/dev/null; then
|
||||
# Logrotate twice to move all data out of sight of FTL
|
||||
/usr/sbin/logrotate --force /etc/pihole/logrotate; sleep 3
|
||||
/usr/sbin/logrotate --force /etc/pihole/logrotate
|
||||
else
|
||||
# Flush both pihole.log and pihole.log.1 (if existing)
|
||||
echo " " > /var/log/pihole.log
|
||||
if [ -f /var/log/pihole.log.1 ]; then
|
||||
echo " " > /var/log/pihole.log.1
|
||||
chmod 644 /var/log/pihole.log.1
|
||||
fi
|
||||
fi
|
||||
# Delete most recent 24 hours from FTL's database, leave even older data intact (don't wipe out all history)
|
||||
deleted=$(sqlite3 "${DBFILE}" "DELETE FROM queries WHERE timestamp >= strftime('%s','now')-86400; select changes() from queries limit 1")
|
||||
|
||||
# Restart pihole-FTL to force reloading history
|
||||
sudo pihole restartdns
|
||||
fi
|
||||
|
||||
if [[ "$@" != *"quiet"* ]]; then
|
||||
echo -e "${OVER} ${TICK} Flushed /var/log/pihole.log"
|
||||
echo -e " ${TICK} Deleted ${deleted} queries from database"
|
||||
echo -e "${OVER} ${TICK} Flushed /var/log/pihole.log"
|
||||
echo -e " ${TICK} Deleted ${deleted} queries from database"
|
||||
fi
|
||||
|
254  advanced/Scripts/query.sh  Executable file
@@ -0,0 +1,254 @@
|
||||
#!/usr/bin/env bash
|
||||
# shellcheck disable=SC1090
|
||||
# Pi-hole: A black hole for Internet advertisements
|
||||
# (c) 2018 Pi-hole, LLC (https://pi-hole.net)
|
||||
# Network-wide ad blocking via your own hardware.
|
||||
#
|
||||
# Query Domain Lists
|
||||
#
|
||||
# This file is copyright under the latest version of the EUPL.
|
||||
# Please see LICENSE file for your rights under this license.
|
||||
|
||||
# Globals
|
||||
piholeDir="/etc/pihole"
|
||||
gravityDBfile="${piholeDir}/gravity.db"
|
||||
options="$*"
|
||||
all=""
|
||||
exact=""
|
||||
blockpage=""
|
||||
matchType="match"
|
||||
|
||||
colfile="/opt/pihole/COL_TABLE"
|
||||
source "${colfile}"
|
||||
|
||||
# Scan an array of files for matching strings
|
||||
scanList(){
|
||||
# Escape full stops
|
||||
local domain="${1}" esc_domain="${1//./\\.}" lists="${2}" type="${3:-}"
|
||||
|
||||
# Prevent grep from printing file path
|
||||
cd "$piholeDir" || exit 1
|
||||
|
||||
# Prevent grep -i matching slowly: http://bit.ly/2xFXtUX
|
||||
export LC_CTYPE=C
|
||||
|
||||
# /dev/null forces filename to be printed when only one list has been generated
|
||||
case "${type}" in
|
||||
"exact" ) grep -i -E -l "(^|(?<!#)\\s)${esc_domain}($|\\s|#)" ${lists} /dev/null 2>/dev/null;;
|
||||
# Iterate through each regexp and check whether it matches the domainQuery
|
||||
# If it does, print the matching regexp and continue looping
|
||||
# Input 1 - regexps | Input 2 - domainQuery
|
||||
"regex" )
|
||||
for list in ${lists}; do
|
||||
if [[ "${domain}" =~ ${list} ]]; then
|
||||
printf "%b\n" "${list}";
|
||||
fi
|
||||
done;;
|
||||
* ) grep -i "${esc_domain}" ${lists} /dev/null 2>/dev/null;;
|
||||
esac
|
||||
}
|
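Hypothetical calls to scanList above; the list file names are placeholders, and the third argument selects the match mode:

    scanList "doubleclick.net" "list.0.example.domains list.1.example.domains"   # default substring match across list files
    scanList "ads.doubleclick.net" '(^|\.)doubleclick\.net$' "regex"             # prints the regexp because it matches the domain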
||||
|
||||
if [[ "${options}" == "-h" ]] || [[ "${options}" == "--help" ]]; then
|
||||
echo "Usage: pihole -q [option] <domain>
|
||||
Example: 'pihole -q -exact domain.com'
|
||||
Query the adlists for a specified domain
|
||||
|
||||
Options:
|
||||
-exact Search the block lists for exact domain matches
|
||||
-all Return all query matches within a block list
|
||||
-h, --help Show this help dialog"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Handle valid options
|
||||
if [[ "${options}" == *"-bp"* ]]; then
|
||||
exact="exact"; blockpage=true
|
||||
else
|
||||
[[ "${options}" == *"-all"* ]] && all=true
|
||||
if [[ "${options}" == *"-exact"* ]]; then
|
||||
exact="exact"; matchType="exact ${matchType}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Strip valid options, leaving only the domain and invalid options
|
||||
# This allows users to place the options before or after the domain
|
||||
options=$(sed -E 's/ ?-(bp|adlists?|all|exact) ?//g' <<< "${options}")
|
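An example of the option stripping above, showing that options may precede or follow the domain:

    sed -E 's/ ?-(bp|adlists?|all|exact) ?//g' <<< "-exact doubleclick.net"   # -> doubleclick.net
    sed -E 's/ ?-(bp|adlists?|all|exact) ?//g' <<< "doubleclick.net -all"     # -> doubleclick.net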
||||
|
||||
# Handle remaining options
|
||||
# If $options contain non ASCII characters, convert to punycode
|
||||
case "${options}" in
|
||||
"" ) str="No domain specified";;
|
||||
*" "* ) str="Unknown query option specified";;
|
||||
*[![:ascii:]]* ) domainQuery=$(idn2 "${options}");;
|
||||
* ) domainQuery="${options}";;
|
||||
esac
|
||||
|
||||
if [[ -n "${str:-}" ]]; then
|
||||
echo -e "${str}${COL_NC}\\nTry 'pihole -q --help' for more information."
|
||||
exit 1
|
||||
fi
|
||||
|
scanDatabaseTable() {
    local domain table type querystr result extra
    domain="$(printf "%q" "${1}")"
    table="${2}"
    type="${3:-}"

    # As underscores are legitimate parts of domains, we escape them when using the LIKE operator.
    # Underscores are SQLite wildcards matching exactly one character. We obviously want to suppress this
    # behavior. The "ESCAPE '\'" clause specifies that an underscore preceded by an '\' should be matched
    # as a literal underscore character. We pretreat the $domain variable accordingly to escape underscores.
    if [[ "${table}" == "gravity" ]]; then
        case "${exact}" in
            "exact" ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain = '${domain}'";;
            *       ) querystr="SELECT gravity.domain,adlist.address,adlist.enabled FROM gravity LEFT JOIN adlist ON adlist.id = gravity.adlist_id WHERE domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
        esac
    else
        case "${exact}" in
            "exact" ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain = '${domain}'";;
            *       ) querystr="SELECT domain,enabled FROM domainlist WHERE type = '${type}' AND domain LIKE '%${domain//_/\\_}%' ESCAPE '\\'";;
        esac
    fi

    # Send prepared query to gravity database
    result="$(sqlite3 "${gravityDBfile}" "${querystr}")" 2> /dev/null
    if [[ -z "${result}" ]]; then
        # Return early when there are no matches in this table
        return
    fi

    if [[ "${table}" == "gravity" ]]; then
        echo "${result}"
        return
    fi

    # Mark domain as having been white-/blacklist matched (global variable)
    wbMatch=true

    # Print table name
    if [[ -z "${blockpage}" ]]; then
        echo " ${matchType^} found in ${COL_BOLD}exact ${table}${COL_NC}"
    fi

    # Loop over results and print them
    mapfile -t results <<< "${result}"
    for result in "${results[@]}"; do
        if [[ -n "${blockpage}" ]]; then
            echo "π ${result}"
            exit 0
        fi
        domain="${result/|*}"
        if [[ "${result#*|}" == "0" ]]; then
            extra=" (disabled)"
        else
            extra=""
        fi
        echo " ${domain}${extra}"
    done
}
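
# Illustrative form of the escaped LIKE query built above for a domain such as
# "ad_server.example" (hypothetical domain):
#   sqlite3 /etc/pihole/gravity.db \
#     "SELECT domain,enabled FROM domainlist WHERE domain LIKE '%ad\_server.example%' ESCAPE '\';"
# Without the escaping, the underscore would act as a single-character wildcard and
# also match e.g. "adxserver.example".
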
scanRegexDatabaseTable() {
    local domain list
    domain="${1}"
    list="${2}"
    type="${3:-}"

    # Query all regex from the corresponding database tables
    mapfile -t regexList < <(sqlite3 "${gravityDBfile}" "SELECT domain FROM domainlist WHERE type = ${type}" 2> /dev/null)

    # If we have regexps to process
    if [[ "${#regexList[@]}" -ne 0 ]]; then
        # Split regexps over a new line
        str_regexList=$(printf '%s\n' "${regexList[@]}")
        # Check domain against regexps
        mapfile -t regexMatches < <(scanList "${domain}" "${str_regexList}" "regex")
        # If there were regex matches
        if [[ "${#regexMatches[@]}" -ne 0 ]]; then
            # Split matching regexps over a new line
            str_regexMatches=$(printf '%s\n' "${regexMatches[@]}")
            # Form a "matched" message
            str_message="${matchType^} found in ${COL_BOLD}regex ${list}${COL_NC}"
            # Form a "results" message
            str_result="${COL_BOLD}${str_regexMatches}${COL_NC}"
            # If we are displaying more than just the source of the block
            if [[ -z "${blockpage}" ]]; then
                # Set the wildcard match flag
                wcMatch=true
                # Echo the "matched" message, indented by one space
                echo " ${str_message}"
                # Echo the "results" message, each line indented by three spaces
                # shellcheck disable=SC2001
                echo "${str_result}" | sed 's/^/ /'
            else
                echo "π .wildcard"
                exit 0
            fi
        fi
    fi
}

# Scan Whitelist and Blacklist
scanDatabaseTable "${domainQuery}" "whitelist" "0"
scanDatabaseTable "${domainQuery}" "blacklist" "1"

# Scan Regex table
scanRegexDatabaseTable "${domainQuery}" "whitelist" "2"
scanRegexDatabaseTable "${domainQuery}" "blacklist" "3"

# Query block lists
mapfile -t results <<< "$(scanDatabaseTable "${domainQuery}" "gravity")"

# Handle notices
if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then
    echo -e " ${INFO} No ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC} within the block lists"
    exit 0
elif [[ -z "${results[*]}" ]]; then
    # Result found in WL/BL/Wildcards
    exit 0
elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 100 ]]; then
    echo -e " ${INFO} Over 100 ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC}
        This can be overridden using the -all option"
    exit 0
fi

# Print "Exact matches for" title
if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then
    plural=""; [[ "${#results[*]}" -gt 1 ]] && plural="es"
    echo " ${matchType^}${plural} for ${COL_BOLD}${domainQuery}${COL_NC} found in:"
fi

for result in "${results[@]}"; do
    match="${result/|*/}"
    extra="${result#*|}"
    adlistAddress="${extra/|*/}"
    extra="${extra#*|}"
    if [[ "${extra}" == "0" ]]; then
        extra="(disabled)"
    else
        extra=""
    fi

    if [[ -n "${blockpage}" ]]; then
        echo "0 ${adlistAddress}"
    elif [[ -n "${exact}" ]]; then
        echo " - ${adlistAddress} ${extra}"
    else
        if [[ ! "${adlistAddress}" == "${adlistAddress_prev:-}" ]]; then
            count=""
            echo " ${matchType^} found in ${COL_BOLD}${adlistAddress}${COL_NC}:"
            adlistAddress_prev="${adlistAddress}"
        fi
        : $((count++))

        # Print matching domain if $max_count has not been reached
        [[ -z "${all}" ]] && max_count="50"
        if [[ -z "${all}" ]] && [[ "${count}" -ge "${max_count}" ]]; then
            [[ "${count}" -gt "${max_count}" ]] && continue
            echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}"
        else
            echo " ${match} ${extra}"
        fi
    fi
done

exit 0
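
# Typical invocations via the pihole wrapper (illustrative; actual output depends on
# the local gravity database and the configured adlists):
#   pihole -q doubleclick.net          # partial matches, capped at 50 per list
#   pihole -q -exact doubleclick.net   # exact matches only
#   pihole -q -all doubleclick.net     # do not cap the number of matches
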
@@ -15,28 +15,28 @@

# Borrowed from adafruit-pitft-helper < borrowed from raspi-config
# https://github.com/adafruit/Adafruit-PiTFT-Helper/blob/master/adafruit-pitft-helper#L324-L334
getInitSys() {
    if command -v systemctl > /dev/null && systemctl | grep -q '\-\.mount'; then
        SYSTEMD=1
    elif [ -f /etc/init.d/cron ] && [ ! -h /etc/init.d/cron ]; then
        SYSTEMD=0
    else
        echo "Unrecognised init system"
        return 1
    fi
}

# Borrowed from adafruit-pitft-helper:
# https://github.com/adafruit/Adafruit-PiTFT-Helper/blob/master/adafruit-pitft-helper#L274-L285
autoLoginPiToConsole() {
    if [ -e /etc/init.d/lightdm ]; then
        if [ ${SYSTEMD} -eq 1 ]; then
            systemctl set-default multi-user.target
            ln -fs /etc/systemd/system/autologin@.service /etc/systemd/system/getty.target.wants/getty@tty1.service
        else
            update-rc.d lightdm disable 2
            sed /etc/inittab -i -e "s/1:2345:respawn:\/sbin\/getty --noclear 38400 tty1/1:2345:respawn:\/bin\/login -f pi tty1 <\/dev\/tty1 >\/dev\/tty1 2>&1/"
        fi
    fi
}

######### SCRIPT ###########

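# Illustrative check of the detection above on a systemd-based system:
#   command -v systemctl > /dev/null && systemctl | grep -q '\-\.mount' && echo "SYSTEMD=1"
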
@@ -19,6 +19,9 @@ readonly PI_HOLE_FILES_DIR="/etc/.pihole"

# shellcheck disable=SC2034
PH_TEST=true

# When --check-only is passed to this script, it will not perform the actual update
CHECK_ONLY=false

# shellcheck disable=SC1090
source "${PI_HOLE_FILES_DIR}/automated install/basic-install.sh"
# shellcheck disable=SC1091

@@ -28,143 +31,187 @@ source "/opt/pihole/COL_TABLE"

# make_repo() sourced from basic-install.sh
# update_repo() sourced from basic-install.sh
# getGitFiles() sourced from basic-install.sh
# get_binary_name() sourced from basic-install.sh
# FTLcheckUpdate() sourced from basic-install.sh

GitCheckUpdateAvail() {
    local directory
    directory="${1}"
    curdir=$PWD
    cd "${directory}" || return

    # Fetch latest changes in this repo
    git fetch --quiet origin

    # @ alone is a shortcut for HEAD. Older versions of git
    # need @{0}
    LOCAL="$(git rev-parse "@{0}")"

    # The suffix @{upstream} to a branchname
    # (short form <branchname>@{u}) refers
    # to the branch that the branch specified
    # by branchname is set to build on top of
    # (configured with branch.<name>.remote and
    # branch.<name>.merge). A missing branchname
    # defaults to the current one.
    REMOTE="$(git rev-parse "@{upstream}")"

    if [[ "${#LOCAL}" == 0 ]]; then
        echo -e "\\n ${COL_LIGHT_RED}Error: Local revision could not be obtained, please contact Pi-hole Support"
        echo -e " Additional debugging output:${COL_NC}"
        git status
        exit
    fi
    if [[ "${#REMOTE}" == 0 ]]; then
        echo -e "\\n ${COL_LIGHT_RED}Error: Remote revision could not be obtained, please contact Pi-hole Support"
        echo -e " Additional debugging output:${COL_NC}"
        git status
        exit
    fi

    # Change back to original directory
    cd "${curdir}" || exit

    if [[ "${LOCAL}" != "${REMOTE}" ]]; then
        # Local branch is behind remote branch -> Update
        return 0
    else
        # Local branch is up-to-date or in a situation
        # where this updater cannot be used (like on a
        # branch that exists only locally)
        return 1
    fi
}

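# Illustrative sketch of the comparison used above, runnable in any git checkout
# that has an upstream configured (example only):
#   git fetch --quiet origin
#   if [[ "$(git rev-parse "@{0}")" != "$(git rev-parse "@{upstream}")" ]]; then
#       echo "update available"
#   fi
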
main() {
|
||||
local basicError="\\n ${COL_LIGHT_RED}Unable to complete update, please contact Pi-hole Support${COL_NC}"
|
||||
local core_update
|
||||
local web_update
|
||||
local FTL_update
|
||||
local basicError="\\n ${COL_LIGHT_RED}Unable to complete update, please contact Pi-hole Support${COL_NC}"
|
||||
local core_update
|
||||
local web_update
|
||||
local FTL_update
|
||||
|
||||
core_update=false
|
||||
web_update=false
|
||||
FTL_update=false
|
||||
|
||||
# shellcheck disable=1090,2154
|
||||
source "${setupVars}"
|
||||
|
||||
# This is unlikely
|
||||
if ! is_repo "${PI_HOLE_FILES_DIR}" ; then
|
||||
echo -e "\\n ${COL_LIGHT_RED}Error: Core Pi-hole repo is missing from system!
|
||||
Please re-run install script from https://pi-hole.net${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
echo -e " ${INFO} Checking for updates..."
|
||||
|
||||
if GitCheckUpdateAvail "${PI_HOLE_FILES_DIR}" ; then
|
||||
core_update=true
|
||||
echo -e " ${INFO} Pi-hole Core:\\t${COL_YELLOW}update available${COL_NC}"
|
||||
else
|
||||
core_update=false
|
||||
echo -e " ${INFO} Pi-hole Core:\\t${COL_LIGHT_GREEN}up to date${COL_NC}"
|
||||
fi
|
||||
|
||||
if [[ "${INSTALL_WEB}" == true ]]; then
|
||||
if ! is_repo "${ADMIN_INTERFACE_DIR}" ; then
|
||||
echo -e "\\n ${COL_LIGHT_RED}Error: Web Admin repo is missing from system!
|
||||
Please re-run install script from https://pi-hole.net${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
if GitCheckUpdateAvail "${ADMIN_INTERFACE_DIR}" ; then
|
||||
web_update=true
|
||||
echo -e " ${INFO} Web Interface:\\t${COL_YELLOW}update available${COL_NC}"
|
||||
else
|
||||
web_update=false
|
||||
echo -e " ${INFO} Web Interface:\\t${COL_LIGHT_GREEN}up to date${COL_NC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if FTLcheckUpdate > /dev/null; then
|
||||
FTL_update=true
|
||||
echo -e " ${INFO} FTL:\\t\\t${COL_YELLOW}update available${COL_NC}"
|
||||
else
|
||||
web_update=false
|
||||
FTL_update=false
|
||||
echo -e " ${INFO} FTL:\\t\\t${COL_LIGHT_GREEN}up to date${COL_NC}"
|
||||
fi
|
||||
|
||||
if [[ "${core_update}" == false && "${web_update}" == false && "${FTL_update}" == false ]]; then
|
||||
# shellcheck disable=1090,2154
|
||||
source "${setupVars}"
|
||||
|
||||
# This is unlikely
|
||||
if ! is_repo "${PI_HOLE_FILES_DIR}" ; then
|
||||
echo -e "\\n ${COL_LIGHT_RED}Error: Core Pi-hole repo is missing from system!"
|
||||
echo -e " Please re-run install script from https://pi-hole.net${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
echo -e " ${INFO} Checking for updates..."
|
||||
|
||||
if GitCheckUpdateAvail "${PI_HOLE_FILES_DIR}" ; then
|
||||
core_update=true
|
||||
echo -e " ${INFO} Pi-hole Core:\\t${COL_YELLOW}update available${COL_NC}"
|
||||
else
|
||||
core_update=false
|
||||
echo -e " ${INFO} Pi-hole Core:\\t${COL_LIGHT_GREEN}up to date${COL_NC}"
|
||||
fi
|
||||
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
|
||||
if ! is_repo "${ADMIN_INTERFACE_DIR}" ; then
|
||||
echo -e "\\n ${COL_LIGHT_RED}Error: Web Admin repo is missing from system!"
|
||||
echo -e " Please re-run install script from https://pi-hole.net${COL_NC}"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
if GitCheckUpdateAvail "${ADMIN_INTERFACE_DIR}" ; then
|
||||
web_update=true
|
||||
echo -e " ${INFO} Web Interface:\\t${COL_YELLOW}update available${COL_NC}"
|
||||
else
|
||||
web_update=false
|
||||
echo -e " ${INFO} Web Interface:\\t${COL_LIGHT_GREEN}up to date${COL_NC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
local funcOutput
|
||||
funcOutput=$(get_binary_name) #Store output of get_binary_name here
|
||||
local binary
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}" #binary name will be the last line of the output of get_binary_name (it always begins with pihole-FTL)
|
||||
|
||||
if FTLcheckUpdate "${binary}" > /dev/null; then
|
||||
FTL_update=true
|
||||
echo -e " ${INFO} FTL:\\t\\t${COL_YELLOW}update available${COL_NC}"
|
||||
else
|
||||
case $? in
|
||||
1)
|
||||
echo -e " ${INFO} FTL:\\t\\t${COL_LIGHT_GREEN}up to date${COL_NC}"
|
||||
;;
|
||||
2)
|
||||
echo -e " ${INFO} FTL:\\t\\t${COL_LIGHT_RED}Branch is not available.${COL_NC}\\n\\t\\t\\tUse ${COL_LIGHT_GREEN}pihole checkout ftl [branchname]${COL_NC} to switch to a valid branch."
|
||||
;;
|
||||
*)
|
||||
echo -e " ${INFO} FTL:\\t\\t${COL_LIGHT_RED}Something has gone wrong, contact support${COL_NC}"
|
||||
esac
|
||||
FTL_update=false
|
||||
fi
|
||||
|
||||
# Determine FTL branch
|
||||
local ftlBranch
|
||||
if [[ -f "/etc/pihole/ftlbranch" ]]; then
|
||||
ftlBranch=$(</etc/pihole/ftlbranch)
|
||||
else
|
||||
ftlBranch="master"
|
||||
fi
|
||||
|
||||
if [[ ! "${ftlBranch}" == "master" && ! "${ftlBranch}" == "development" ]]; then
|
||||
# Notify the user that they are on a custom branch, which might mean they are left
# behind if a branch was merged to development and later abandoned
|
||||
printf " %b %bWarning:%b You are using FTL from a custom branch (%s) and might be missing future releases.\\n" "${INFO}" "${COL_LIGHT_RED}" "${COL_NC}" "${ftlBranch}"
|
||||
fi
|
||||
|
||||
if [[ "${core_update}" == false && "${web_update}" == false && "${FTL_update}" == false ]]; then
|
||||
echo ""
|
||||
echo -e " ${TICK} Everything is up to date!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${CHECK_ONLY}" == true ]]; then
|
||||
echo ""
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${core_update}" == true ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} Pi-hole core files out of date, updating local repo."
|
||||
getGitFiles "${PI_HOLE_FILES_DIR}" "${PI_HOLE_GIT_URL}"
|
||||
echo -e " ${INFO} If you had made any changes in '/etc/.pihole/', they have been stashed using 'git stash'"
|
||||
fi
|
||||
|
||||
if [[ "${web_update}" == true ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} Pi-hole Web Admin files out of date, updating local repo."
|
||||
getGitFiles "${ADMIN_INTERFACE_DIR}" "${ADMIN_INTERFACE_GIT_URL}"
|
||||
echo -e " ${INFO} If you had made any changes in '/var/www/html/admin/', they have been stashed using 'git stash'"
|
||||
fi
|
||||
|
||||
if [[ "${FTL_update}" == true ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} FTL out of date, it will be updated by the installer."
|
||||
fi
|
||||
|
||||
if [[ "${FTL_update}" == true || "${core_update}" == true ]]; then
|
||||
${PI_HOLE_FILES_DIR}/automated\ install/basic-install.sh --reconfigure --unattended || \
|
||||
echo -e "${basicError}" && exit 1
|
||||
fi
|
||||
|
||||
if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]]; then
|
||||
# Force an update of the updatechecker
|
||||
/opt/pihole/updatecheck.sh
|
||||
/opt/pihole/updatecheck.sh x remote
|
||||
echo -e " ${INFO} Local version file information updated."
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo -e " ${TICK} Everything is up to date!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${core_update}" == true ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} Pi-hole core files out of date, updating local repo."
|
||||
getGitFiles "${PI_HOLE_FILES_DIR}" "${PI_HOLE_GIT_URL}"
|
||||
echo -e " ${INFO} If you had made any changes in '/etc/.pihole/', they have been stashed using 'git stash'"
|
||||
fi
|
||||
|
||||
if [[ "${web_update}" == true ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} Pi-hole Web Admin files out of date, updating local repo."
|
||||
getGitFiles "${ADMIN_INTERFACE_DIR}" "${ADMIN_INTERFACE_GIT_URL}"
|
||||
echo -e " ${INFO} If you had made any changes in '/var/www/html/admin/', they have been stashed using 'git stash'"
|
||||
fi
|
||||
|
||||
if [[ "${FTL_update}" == true ]]; then
|
||||
echo ""
|
||||
echo -e " ${INFO} FTL out of date, it will be updated by the installer."
|
||||
fi
|
||||
|
||||
if [[ "${FTL_update}" == true || "${core_update}" == true || "${web_update}" == true ]]; then
|
||||
${PI_HOLE_FILES_DIR}/automated\ install/basic-install.sh --reconfigure --unattended || \
|
||||
echo -e "${basicError}" && exit 1
|
||||
fi
|
||||
echo ""
|
||||
exit 0
|
||||
}
|
||||
|
||||
if [[ "$1" == "--check-only" ]]; then
|
||||
CHECK_ONLY=true
|
||||
fi
|
||||
|
||||
main
|
||||
|
@@ -10,57 +10,85 @@

# Credit: https://stackoverflow.com/a/46324904
function json_extract() {
    local key=$1
    local json=$2

    local string_regex='"([^"\]|\\.)*"'
    local number_regex='-?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+-]?[0-9]+)?'
    local value_regex="${string_regex}|${number_regex}|true|false|null"
    local pair_regex="\"${key}\"[[:space:]]*:[[:space:]]*(${value_regex})"

    if [[ ${json} =~ ${pair_regex} ]]; then
        echo $(sed 's/^"\|"$//g' <<< "${BASH_REMATCH[1]}")
    else
        return 1
    fi
}

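# Illustrative use of json_extract (example JSON only):
#   json_extract tag_name '{"tag_name": "v5.0", "name": "Pi-hole Core v5.0"}'
# prints:
#   v5.0
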
function get_local_branch() {
    # Return active branch
    cd "${1}" 2> /dev/null || return 1
    git rev-parse --abbrev-ref HEAD || return 1
}

function get_local_version() {
    # Return the locally checked-out version (git describe of the current tag)
    cd "${1}" 2> /dev/null || return 1
    git describe --long --dirty --tags 2> /dev/null || return 1
}

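# Illustrative output of the helpers above (branch name and version are examples only):
#   get_local_branch /etc/.pihole    ->  master
#   get_local_version /etc/.pihole   ->  v5.0-0-gabc1234
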
# Source the setupvars config file
|
||||
# shellcheck disable=SC1091
|
||||
. /etc/pihole/setupVars.conf
|
||||
|
||||
if [[ "$2" == "remote" ]]; then
|
||||
|
||||
if [[ "$3" == "reboot" ]]; then
|
||||
sleep 30
|
||||
fi
|
||||
if [[ "$3" == "reboot" ]]; then
|
||||
sleep 30
|
||||
fi
|
||||
|
||||
GITHUB_CORE_VERSION="$(json_extract tag_name "$(curl -q 'https://api.github.com/repos/pi-hole/pi-hole/releases/latest' 2> /dev/null)")"
|
||||
GITHUB_WEB_VERSION="$(json_extract tag_name "$(curl -q 'https://api.github.com/repos/pi-hole/AdminLTE/releases/latest' 2> /dev/null)")"
|
||||
GITHUB_FTL_VERSION="$(json_extract tag_name "$(curl -q 'https://api.github.com/repos/pi-hole/FTL/releases/latest' 2> /dev/null)")"
|
||||
GITHUB_VERSION_FILE="/etc/pihole/GitHubVersions"
|
||||
|
||||
echo -n "${GITHUB_CORE_VERSION} ${GITHUB_WEB_VERSION} ${GITHUB_FTL_VERSION}" > "/etc/pihole/GitHubVersions"
|
||||
GITHUB_CORE_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/pi-hole/releases/latest' 2> /dev/null)")"
|
||||
echo -n "${GITHUB_CORE_VERSION}" > "${GITHUB_VERSION_FILE}"
|
||||
chmod 644 "${GITHUB_VERSION_FILE}"
|
||||
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
|
||||
GITHUB_WEB_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/AdminLTE/releases/latest' 2> /dev/null)")"
|
||||
echo -n " ${GITHUB_WEB_VERSION}" >> "${GITHUB_VERSION_FILE}"
|
||||
fi
|
||||
|
||||
GITHUB_FTL_VERSION="$(json_extract tag_name "$(curl -s 'https://api.github.com/repos/pi-hole/FTL/releases/latest' 2> /dev/null)")"
|
||||
echo -n " ${GITHUB_FTL_VERSION}" >> "${GITHUB_VERSION_FILE}"
|
||||
|
||||
else
|
||||
|
||||
CORE_BRANCH="$(get_local_branch /etc/.pihole)"
|
||||
WEB_BRANCH="$(get_local_branch /var/www/html/admin)"
|
||||
FTL_BRANCH="$(pihole-FTL branch)"
|
||||
LOCAL_BRANCH_FILE="/etc/pihole/localbranches"
|
||||
|
||||
echo -n "${CORE_BRANCH} ${WEB_BRANCH} ${FTL_BRANCH}" > "/etc/pihole/localbranches"
|
||||
CORE_BRANCH="$(get_local_branch /etc/.pihole)"
|
||||
echo -n "${CORE_BRANCH}" > "${LOCAL_BRANCH_FILE}"
|
||||
chmod 644 "${LOCAL_BRANCH_FILE}"
|
||||
|
||||
CORE_VERSION="$(get_local_version /etc/.pihole)"
|
||||
WEB_VERSION="$(get_local_version /var/www/html/admin)"
|
||||
FTL_VERSION="$(pihole-FTL version)"
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
|
||||
WEB_BRANCH="$(get_local_branch /var/www/html/admin)"
|
||||
echo -n " ${WEB_BRANCH}" >> "${LOCAL_BRANCH_FILE}"
|
||||
fi
|
||||
|
||||
echo -n "${CORE_VERSION} ${WEB_VERSION} ${FTL_VERSION}" > "/etc/pihole/localversions"
|
||||
FTL_BRANCH="$(pihole-FTL branch)"
|
||||
echo -n " ${FTL_BRANCH}" >> "${LOCAL_BRANCH_FILE}"
|
||||
|
||||
LOCAL_VERSION_FILE="/etc/pihole/localversions"
|
||||
|
||||
CORE_VERSION="$(get_local_version /etc/.pihole)"
|
||||
echo -n "${CORE_VERSION}" > "${LOCAL_VERSION_FILE}"
|
||||
chmod 644 "${LOCAL_VERSION_FILE}"
|
||||
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
|
||||
WEB_VERSION="$(get_local_version /var/www/html/admin)"
|
||||
echo -n " ${WEB_VERSION}" >> "${LOCAL_VERSION_FILE}"
|
||||
fi
|
||||
|
||||
FTL_VERSION="$(pihole-FTL version)"
|
||||
echo -n " ${FTL_VERSION}" >> "${LOCAL_VERSION_FILE}"
|
||||
|
||||
fi
|
||||
|
@@ -14,135 +14,184 @@ COREGITDIR="/etc/.pihole/"
|
||||
WEBGITDIR="/var/www/html/admin/"
|
||||
|
||||
getLocalVersion() {
|
||||
# FTL requires a different method
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
pihole-FTL version
|
||||
# FTL requires a different method
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
pihole-FTL version
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Get the tagged version of the local repository
|
||||
local directory="${1}"
|
||||
local version
|
||||
|
||||
cd "${directory}" 2> /dev/null || { echo "${DEFAULT}"; return 1; }
|
||||
version=$(git describe --tags --always || echo "$DEFAULT")
|
||||
if [[ "${version}" =~ ^v ]]; then
|
||||
echo "${version}"
|
||||
elif [[ "${version}" == "${DEFAULT}" ]]; then
|
||||
echo "ERROR"
|
||||
return 1
|
||||
else
|
||||
echo "Untagged"
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Get the tagged version of the local repository
|
||||
local directory="${1}"
|
||||
local version
|
||||
|
||||
cd "${directory}" 2> /dev/null || { echo "${DEFAULT}"; return 1; }
|
||||
version=$(git describe --tags --always || echo "$DEFAULT")
|
||||
if [[ "${version}" =~ ^v ]]; then
|
||||
echo "${version}"
|
||||
elif [[ "${version}" == "${DEFAULT}" ]]; then
|
||||
echo "ERROR"
|
||||
return 1
|
||||
else
|
||||
echo "Untagged"
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
getLocalHash() {
|
||||
# Local FTL hash does not exist on filesystem
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
echo "N/A"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Get the short hash of the local repository
|
||||
local directory="${1}"
|
||||
local hash
|
||||
# Local FTL hash does not exist on filesystem
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
echo "N/A"
|
||||
return 0
|
||||
fi
|
||||
|
||||
cd "${directory}" 2> /dev/null || { echo "${DEFAULT}"; return 1; }
|
||||
hash=$(git rev-parse --short HEAD || echo "$DEFAULT")
|
||||
if [[ "${hash}" == "${DEFAULT}" ]]; then
|
||||
echo "ERROR"
|
||||
return 1
|
||||
else
|
||||
echo "${hash}"
|
||||
fi
|
||||
return 0
|
||||
# Get the short hash of the local repository
|
||||
local directory="${1}"
|
||||
local hash
|
||||
|
||||
cd "${directory}" 2> /dev/null || { echo "${DEFAULT}"; return 1; }
|
||||
hash=$(git rev-parse --short HEAD || echo "$DEFAULT")
|
||||
if [[ "${hash}" == "${DEFAULT}" ]]; then
|
||||
echo "ERROR"
|
||||
return 1
|
||||
else
|
||||
echo "${hash}"
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
getRemoteHash(){
|
||||
# Remote FTL hash is not applicable
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
echo "N/A"
|
||||
# Remote FTL hash is not applicable
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
echo "N/A"
|
||||
return 0
|
||||
fi
|
||||
|
||||
local daemon="${1}"
|
||||
local branch="${2}"
|
||||
|
||||
hash=$(git ls-remote --heads "https://github.com/pi-hole/${daemon}" | \
|
||||
awk -v bra="$branch" '$0~bra {print substr($0,0,8);exit}')
|
||||
if [[ -n "$hash" ]]; then
|
||||
echo "$hash"
|
||||
else
|
||||
echo "ERROR"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
local daemon="${1}"
|
||||
local branch="${2}"
|
||||
|
||||
hash=$(git ls-remote --heads "https://github.com/pi-hole/${daemon}" | \
|
||||
awk -v bra="$branch" '$0~bra {print substr($0,0,8);exit}')
|
||||
if [[ -n "$hash" ]]; then
|
||||
echo "$hash"
|
||||
else
|
||||
echo "ERROR"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
getRemoteVersion(){
|
||||
# Get the version from the remote origin
|
||||
local daemon="${1}"
|
||||
local version
|
||||
# Get the version from the remote origin
|
||||
local daemon="${1}"
|
||||
local version
|
||||
local cachedVersions
|
||||
local arrCache
|
||||
cachedVersions="/etc/pihole/GitHubVersions"
|
||||
|
||||
version=$(curl --silent --fail "https://api.github.com/repos/pi-hole/${daemon}/releases/latest" | \
|
||||
awk -F: '$1 ~/tag_name/ { print $2 }' | \
|
||||
tr -cd '[[:alnum:]]._-')
|
||||
if [[ "${version}" =~ ^v ]]; then
|
||||
echo "${version}"
|
||||
else
|
||||
echo "ERROR"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
# If the above file exists, we can read from it instead. This prevents overuse of the GitHub API
|
||||
if [[ -f "$cachedVersions" ]]; then
|
||||
IFS=' ' read -r -a arrCache < "$cachedVersions"
|
||||
case $daemon in
|
||||
"pi-hole" ) echo "${arrCache[0]}";;
|
||||
"AdminLTE" ) echo "${arrCache[1]}";;
|
||||
"FTL" ) echo "${arrCache[2]}";;
|
||||
esac
|
||||
|
||||
return 0
|
||||
fi
|
||||
|
||||
version=$(curl --silent --fail "https://api.github.com/repos/pi-hole/${daemon}/releases/latest" | \
|
||||
awk -F: '$1 ~/tag_name/ { print $2 }' | \
|
||||
tr -cd '[[:alnum:]]._-')
|
||||
if [[ "${version}" =~ ^v ]]; then
|
||||
echo "${version}"
|
||||
else
|
||||
echo "ERROR"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
getLocalBranch(){
|
||||
# Get the checked out branch of the local directory
|
||||
local directory="${1}"
|
||||
local branch
|
||||
|
||||
# The local FTL branch is stored in /etc/pihole/ftlbranch
|
||||
if [[ "$1" == "FTL" ]]; then
|
||||
branch="$(pihole-FTL branch)"
|
||||
else
|
||||
cd "${directory}" 2> /dev/null || { echo "${DEFAULT}"; return 1; }
|
||||
branch=$(git rev-parse --abbrev-ref HEAD || echo "$DEFAULT")
|
||||
fi
|
||||
if [[ ! "${branch}" =~ ^v ]]; then
|
||||
if [[ "${branch}" == "master" ]]; then
|
||||
echo ""
|
||||
elif [[ "${branch}" == "HEAD" ]]; then
|
||||
echo "in detached HEAD state at "
|
||||
else
|
||||
echo "${branch} "
|
||||
fi
|
||||
else
|
||||
# Branch started in "v"
|
||||
echo "release "
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
versionOutput() {
|
||||
[[ "$1" == "pi-hole" ]] && GITDIR=$COREGITDIR
|
||||
[[ "$1" == "AdminLTE" ]] && GITDIR=$WEBGITDIR
|
||||
[[ "$1" == "FTL" ]] && GITDIR="FTL"
|
||||
|
||||
[[ "$2" == "-c" ]] || [[ "$2" == "--current" ]] || [[ -z "$2" ]] && current=$(getLocalVersion $GITDIR)
|
||||
[[ "$2" == "-l" ]] || [[ "$2" == "--latest" ]] || [[ -z "$2" ]] && latest=$(getRemoteVersion "$1")
|
||||
if [[ "$2" == "-h" ]] || [[ "$2" == "--hash" ]]; then
|
||||
[[ "$3" == "-c" ]] || [[ "$3" == "--current" ]] || [[ -z "$3" ]] && curHash=$(getLocalHash "$GITDIR")
|
||||
[[ "$3" == "-l" ]] || [[ "$3" == "--latest" ]] || [[ -z "$3" ]] && latHash=$(getRemoteHash "$1" "$(cd "$GITDIR" 2> /dev/null && git rev-parse --abbrev-ref HEAD)")
|
||||
fi
|
||||
[[ "$1" == "pi-hole" ]] && GITDIR=$COREGITDIR
|
||||
[[ "$1" == "AdminLTE" ]] && GITDIR=$WEBGITDIR
|
||||
[[ "$1" == "FTL" ]] && GITDIR="FTL"
|
||||
|
||||
if [[ -n "$current" ]] && [[ -n "$latest" ]]; then
|
||||
output="${1^} version is $current (Latest: $latest)"
|
||||
elif [[ -n "$current" ]] && [[ -z "$latest" ]]; then
|
||||
output="Current ${1^} version is $current"
|
||||
elif [[ -z "$current" ]] && [[ -n "$latest" ]]; then
|
||||
output="Latest ${1^} version is $latest"
|
||||
elif [[ "$curHash" == "N/A" ]] || [[ "$latHash" == "N/A" ]]; then
|
||||
output="${1^} hash is not applicable"
|
||||
elif [[ -n "$curHash" ]] && [[ -n "$latHash" ]]; then
|
||||
output="${1^} hash is $curHash (Latest: $latHash)"
|
||||
elif [[ -n "$curHash" ]] && [[ -z "$latHash" ]]; then
|
||||
output="Current ${1^} hash is $curHash"
|
||||
elif [[ -z "$curHash" ]] && [[ -n "$latHash" ]]; then
|
||||
output="Latest ${1^} hash is $latHash"
|
||||
else
|
||||
errorOutput
|
||||
fi
|
||||
[[ "$2" == "-c" ]] || [[ "$2" == "--current" ]] || [[ -z "$2" ]] && current=$(getLocalVersion $GITDIR) && branch=$(getLocalBranch $GITDIR)
|
||||
[[ "$2" == "-l" ]] || [[ "$2" == "--latest" ]] || [[ -z "$2" ]] && latest=$(getRemoteVersion "$1")
|
||||
if [[ "$2" == "-h" ]] || [[ "$2" == "--hash" ]]; then
|
||||
[[ "$3" == "-c" ]] || [[ "$3" == "--current" ]] || [[ -z "$3" ]] && curHash=$(getLocalHash "$GITDIR") && branch=$(getLocalBranch $GITDIR)
|
||||
[[ "$3" == "-l" ]] || [[ "$3" == "--latest" ]] || [[ -z "$3" ]] && latHash=$(getRemoteHash "$1" "$(cd "$GITDIR" 2> /dev/null && git rev-parse --abbrev-ref HEAD)")
|
||||
fi
|
||||
if [[ -n "$current" ]] && [[ -n "$latest" ]]; then
|
||||
output="${1^} version is $branch$current (Latest: $latest)"
|
||||
elif [[ -n "$current" ]] && [[ -z "$latest" ]]; then
|
||||
output="Current ${1^} version is $branch$current."
|
||||
elif [[ -z "$current" ]] && [[ -n "$latest" ]]; then
|
||||
output="Latest ${1^} version is $latest"
|
||||
elif [[ "$curHash" == "N/A" ]] || [[ "$latHash" == "N/A" ]]; then
|
||||
output="${1^} hash is not applicable"
|
||||
elif [[ -n "$curHash" ]] && [[ -n "$latHash" ]]; then
|
||||
output="${1^} hash is $curHash (Latest: $latHash)"
|
||||
elif [[ -n "$curHash" ]] && [[ -z "$latHash" ]]; then
|
||||
output="Current ${1^} hash is $curHash"
|
||||
elif [[ -z "$curHash" ]] && [[ -n "$latHash" ]]; then
|
||||
output="Latest ${1^} hash is $latHash"
|
||||
else
|
||||
errorOutput
|
||||
fi
|
||||
|
||||
[[ -n "$output" ]] && echo " $output"
|
||||
[[ -n "$output" ]] && echo " $output"
|
||||
}
|
||||
|
||||
errorOutput() {
|
||||
echo " Invalid Option! Try 'pihole -v --help' for more information."
|
||||
exit 1
|
||||
echo " Invalid Option! Try 'pihole -v --help' for more information."
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
defaultOutput() {
|
||||
versionOutput "pi-hole" "$@"
|
||||
versionOutput "AdminLTE" "$@"
|
||||
versionOutput "FTL" "$@"
|
||||
# Source the setupvars config file
|
||||
# shellcheck disable=SC1091
|
||||
source /etc/pihole/setupVars.conf
|
||||
|
||||
versionOutput "pi-hole" "$@"
|
||||
|
||||
if [[ "${INSTALL_WEB_INTERFACE}" == true ]]; then
|
||||
versionOutput "AdminLTE" "$@"
|
||||
fi
|
||||
|
||||
versionOutput "FTL" "$@"
|
||||
}
|
||||
|
||||
helpFunc() {
|
||||
echo "Usage: pihole -v [repo | option] [option]
|
||||
echo "Usage: pihole -v [repo | option] [option]
|
||||
Example: 'pihole -v -p -l'
|
||||
Show Pi-hole, Admin Console & FTL versions
|
||||
|
||||
@@ -150,7 +199,7 @@ Repositories:
|
||||
-p, --pihole Only retrieve info regarding Pi-hole repository
|
||||
-a, --admin Only retrieve info regarding AdminLTE repository
|
||||
-f, --ftl Only retrieve info regarding FTL repository
|
||||
|
||||
|
||||
Options:
|
||||
-c, --current Return the current version
|
||||
-l, --latest Return the latest version
|
||||
@@ -160,9 +209,9 @@ Options:
|
||||
}
|
||||
|
||||
case "${1}" in
|
||||
"-p" | "--pihole" ) shift; versionOutput "pi-hole" "$@";;
|
||||
"-a" | "--admin" ) shift; versionOutput "AdminLTE" "$@";;
|
||||
"-f" | "--ftl" ) shift; versionOutput "FTL" "$@";;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
* ) defaultOutput "$@";;
|
||||
"-p" | "--pihole" ) shift; versionOutput "pi-hole" "$@";;
|
||||
"-a" | "--admin" ) shift; versionOutput "AdminLTE" "$@";;
|
||||
"-f" | "--ftl" ) shift; versionOutput "FTL" "$@";;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
* ) defaultOutput "$@";;
|
||||
esac
|
||||
|
@@ -16,14 +16,18 @@ readonly dhcpconfig="/etc/dnsmasq.d/02-pihole-dhcp.conf"
|
||||
readonly FTLconf="/etc/pihole/pihole-FTL.conf"
|
||||
# 03 -> wildcards
|
||||
readonly dhcpstaticconfig="/etc/dnsmasq.d/04-pihole-static-dhcp.conf"
|
||||
readonly PI_HOLE_BIN_DIR="/usr/local/bin"
|
||||
readonly dnscustomfile="/etc/pihole/custom.list"
|
||||
|
||||
readonly gravityDBfile="/etc/pihole/gravity.db"
|
||||
|
||||
coltable="/opt/pihole/COL_TABLE"
|
||||
if [[ -f ${coltable} ]]; then
|
||||
source ${coltable}
|
||||
source ${coltable}
|
||||
fi
|
||||
|
||||
helpFunc() {
|
||||
echo "Usage: pihole -a [options]
|
||||
echo "Usage: pihole -a [options]
|
||||
Example: pihole -a -p password
|
||||
Set options for the Admin Console
|
||||
|
||||
@@ -32,273 +36,286 @@ Options:
|
||||
-c, celsius Set Celsius as preferred temperature unit
|
||||
-f, fahrenheit Set Fahrenheit as preferred temperature unit
|
||||
-k, kelvin Set Kelvin as preferred temperature unit
|
||||
-r, hostrecord Add a name to the DNS associated to an IPv4/IPv6 address
|
||||
-e, email Set an administrative contact address for the Block Page
|
||||
-h, --help Show this help dialog
|
||||
-i, interface Specify dnsmasq's interface listening behavior
|
||||
-l, privacylevel Set privacy level (0 = lowest, 3 = highest)"
|
||||
exit 0
|
||||
-l, privacylevel Set privacy level (0 = lowest, 4 = highest)"
|
||||
exit 0
|
||||
}
|
||||
|
||||
add_setting() {
|
||||
echo "${1}=${2}" >> "${setupVars}"
|
||||
echo "${1}=${2}" >> "${setupVars}"
|
||||
}
|
||||
|
||||
delete_setting() {
|
||||
sed -i "/${1}/d" "${setupVars}"
|
||||
sed -i "/${1}/d" "${setupVars}"
|
||||
}
|
||||
|
||||
change_setting() {
|
||||
delete_setting "${1}"
|
||||
add_setting "${1}" "${2}"
|
||||
delete_setting "${1}"
|
||||
add_setting "${1}" "${2}"
|
||||
}
|
||||
|
||||
addFTLsetting() {
|
||||
echo "${1}=${2}" >> "${FTLconf}"
|
||||
echo "${1}=${2}" >> "${FTLconf}"
|
||||
}
|
||||
|
||||
deleteFTLsetting() {
|
||||
sed -i "/${1}/d" "${FTLconf}"
|
||||
sed -i "/${1}/d" "${FTLconf}"
|
||||
}
|
||||
|
||||
changeFTLsetting() {
|
||||
deleteFTLsetting "${1}"
|
||||
addFTLsetting "${1}" "${2}"
|
||||
deleteFTLsetting "${1}"
|
||||
addFTLsetting "${1}" "${2}"
|
||||
}
|
||||
|
||||
add_dnsmasq_setting() {
|
||||
if [[ "${2}" != "" ]]; then
|
||||
echo "${1}=${2}" >> "${dnsmasqconfig}"
|
||||
else
|
||||
echo "${1}" >> "${dnsmasqconfig}"
|
||||
fi
|
||||
if [[ "${2}" != "" ]]; then
|
||||
echo "${1}=${2}" >> "${dnsmasqconfig}"
|
||||
else
|
||||
echo "${1}" >> "${dnsmasqconfig}"
|
||||
fi
|
||||
}
|
||||
|
||||
delete_dnsmasq_setting() {
|
||||
sed -i "/${1}/d" "${dnsmasqconfig}"
|
||||
sed -i "/${1}/d" "${dnsmasqconfig}"
|
||||
}
|
||||
|
||||
SetTemperatureUnit() {
|
||||
change_setting "TEMPERATUREUNIT" "${unit}"
|
||||
echo -e " ${TICK} Set temperature unit to ${unit}"
|
||||
change_setting "TEMPERATUREUNIT" "${unit}"
|
||||
echo -e " ${TICK} Set temperature unit to ${unit}"
|
||||
}
|
||||
|
||||
HashPassword() {
|
||||
# Compute password hash twice to avoid rainbow table vulnerability
|
||||
return=$(echo -n ${1} | sha256sum | sed 's/\s.*$//')
|
||||
return=$(echo -n ${return} | sha256sum | sed 's/\s.*$//')
|
||||
echo ${return}
|
||||
# Compute password hash twice to avoid rainbow table vulnerability
|
||||
return=$(echo -n "${1}" | sha256sum | sed 's/\s.*$//')
|
||||
return=$(echo -n "${return}" | sha256sum | sed 's/\s.*$//')
|
||||
echo "${return}"
|
||||
}
|
||||
|
||||
SetWebPassword() {
|
||||
if [ "${SUDO_USER}" == "www-data" ]; then
|
||||
echo "Security measure: user www-data is not allowed to change webUI password!"
|
||||
echo "Exiting"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${SUDO_USER}" == "lighttpd" ]; then
|
||||
echo "Security measure: user lighttpd is not allowed to change webUI password!"
|
||||
echo "Exiting"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( ${#args[2]} > 0 )) ; then
|
||||
readonly PASSWORD="${args[2]}"
|
||||
readonly CONFIRM="${PASSWORD}"
|
||||
else
|
||||
# Prevents a bug if the user presses Ctrl+C and it continues to hide the text typed.
|
||||
# So we reset the terminal via stty if the user does press Ctrl+C
|
||||
trap '{ echo -e "\nNo password will be set" ; stty sane ; exit 1; }' INT
|
||||
read -s -p "Enter New Password (Blank for no password): " PASSWORD
|
||||
echo ""
|
||||
|
||||
if [ "${PASSWORD}" == "" ]; then
|
||||
change_setting "WEBPASSWORD" ""
|
||||
echo -e " ${TICK} Password Removed"
|
||||
exit 0
|
||||
if [ "${SUDO_USER}" == "www-data" ]; then
|
||||
echo "Security measure: user www-data is not allowed to change webUI password!"
|
||||
echo "Exiting"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
read -s -p "Confirm Password: " CONFIRM
|
||||
echo ""
|
||||
fi
|
||||
if [ "${SUDO_USER}" == "lighttpd" ]; then
|
||||
echo "Security measure: user lighttpd is not allowed to change webUI password!"
|
||||
echo "Exiting"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${PASSWORD}" == "${CONFIRM}" ] ; then
|
||||
hash=$(HashPassword "${PASSWORD}")
|
||||
# Save hash to file
|
||||
change_setting "WEBPASSWORD" "${hash}"
|
||||
echo -e " ${TICK} New password set"
|
||||
else
|
||||
echo -e " ${CROSS} Passwords don't match. Your password has not been changed"
|
||||
exit 1
|
||||
fi
|
||||
if (( ${#args[2]} > 0 )) ; then
|
||||
readonly PASSWORD="${args[2]}"
|
||||
readonly CONFIRM="${PASSWORD}"
|
||||
else
|
||||
# Prevents a bug if the user presses Ctrl+C and it continues to hide the text typed.
|
||||
# So we reset the terminal via stty if the user does press Ctrl+C
|
||||
trap '{ echo -e "\nNo password will be set" ; stty sane ; exit 1; }' INT
|
||||
read -s -r -p "Enter New Password (Blank for no password): " PASSWORD
|
||||
echo ""
|
||||
|
||||
if [ "${PASSWORD}" == "" ]; then
|
||||
change_setting "WEBPASSWORD" ""
|
||||
echo -e " ${TICK} Password Removed"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
read -s -r -p "Confirm Password: " CONFIRM
|
||||
echo ""
|
||||
fi
|
||||
|
||||
if [ "${PASSWORD}" == "${CONFIRM}" ] ; then
|
||||
# We do not wrap this in brackets, otherwise BASH will expand any appropriate syntax
|
||||
hash=$(HashPassword "$PASSWORD")
|
||||
# Save hash to file
|
||||
change_setting "WEBPASSWORD" "${hash}"
|
||||
echo -e " ${TICK} New password set"
|
||||
else
|
||||
echo -e " ${CROSS} Passwords don't match. Your password has not been changed"
|
||||
exit 1
|
||||
fi
|
||||
}
|
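
# Illustrative reproduction of the double hash computed by HashPassword above
# (example password only; the result is a 64-character hex digest):
#   h1=$(echo -n "example" | sha256sum | sed 's/\s.*$//')
#   echo -n "${h1}" | sha256sum | sed 's/\s.*$//'
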
||||
|
||||
ProcessDNSSettings() {
|
||||
source "${setupVars}"
|
||||
source "${setupVars}"
|
||||
|
||||
delete_dnsmasq_setting "server"
|
||||
delete_dnsmasq_setting "server"
|
||||
|
||||
COUNTER=1
|
||||
while [[ 1 ]]; do
|
||||
var=PIHOLE_DNS_${COUNTER}
|
||||
if [ -z "${!var}" ]; then
|
||||
break;
|
||||
fi
|
||||
add_dnsmasq_setting "server" "${!var}"
|
||||
let COUNTER=COUNTER+1
|
||||
done
|
||||
COUNTER=1
|
||||
while true ; do
|
||||
var=PIHOLE_DNS_${COUNTER}
|
||||
if [ -z "${!var}" ]; then
|
||||
break;
|
||||
fi
|
||||
add_dnsmasq_setting "server" "${!var}"
|
||||
(( COUNTER++ ))
|
||||
done
|
||||
|
||||
delete_dnsmasq_setting "domain-needed"
|
||||
# The option LOCAL_DNS_PORT is deprecated
|
||||
# We apply it once more, and then convert it into the current format
|
||||
if [ -n "${LOCAL_DNS_PORT}" ]; then
|
||||
add_dnsmasq_setting "server" "127.0.0.1#${LOCAL_DNS_PORT}"
|
||||
add_setting "PIHOLE_DNS_${COUNTER}" "127.0.0.1#${LOCAL_DNS_PORT}"
|
||||
delete_setting "LOCAL_DNS_PORT"
|
||||
fi
|
||||
|
||||
if [[ "${DNS_FQDN_REQUIRED}" == true ]]; then
|
||||
add_dnsmasq_setting "domain-needed"
|
||||
fi
|
||||
delete_dnsmasq_setting "domain-needed"
|
||||
|
||||
delete_dnsmasq_setting "bogus-priv"
|
||||
if [[ "${DNS_FQDN_REQUIRED}" == true ]]; then
|
||||
add_dnsmasq_setting "domain-needed"
|
||||
fi
|
||||
|
||||
if [[ "${DNS_BOGUS_PRIV}" == true ]]; then
|
||||
add_dnsmasq_setting "bogus-priv"
|
||||
fi
|
||||
delete_dnsmasq_setting "bogus-priv"
|
||||
|
||||
delete_dnsmasq_setting "dnssec"
|
||||
delete_dnsmasq_setting "trust-anchor="
|
||||
if [[ "${DNS_BOGUS_PRIV}" == true ]]; then
|
||||
add_dnsmasq_setting "bogus-priv"
|
||||
fi
|
||||
|
||||
if [[ "${DNSSEC}" == true ]]; then
|
||||
echo "dnssec
|
||||
trust-anchor=.,19036,8,2,49AAC11D7B6F6446702E54A1607371607A1A41855200FD2CE1CDDE32F24E8FB5
|
||||
delete_dnsmasq_setting "dnssec"
|
||||
delete_dnsmasq_setting "trust-anchor="
|
||||
|
||||
if [[ "${DNSSEC}" == true ]]; then
|
||||
echo "dnssec
|
||||
trust-anchor=.,20326,8,2,E06D44B80B8F1D39A95C0B0D7C65D08458E880409BBC683457104237C7F8EC8D
|
||||
" >> "${dnsmasqconfig}"
|
||||
fi
|
||||
fi
|
||||
|
||||
delete_dnsmasq_setting "host-record"
|
||||
delete_dnsmasq_setting "host-record"
|
||||
|
||||
if [ ! -z "${HOSTRECORD}" ]; then
|
||||
add_dnsmasq_setting "host-record" "${HOSTRECORD}"
|
||||
fi
|
||||
if [ -n "${HOSTRECORD}" ]; then
|
||||
add_dnsmasq_setting "host-record" "${HOSTRECORD}"
|
||||
fi
|
||||
|
||||
# Setup interface listening behavior of dnsmasq
|
||||
delete_dnsmasq_setting "interface"
|
||||
delete_dnsmasq_setting "local-service"
|
||||
# Setup interface listening behavior of dnsmasq
|
||||
delete_dnsmasq_setting "interface"
|
||||
delete_dnsmasq_setting "local-service"
|
||||
|
||||
if [[ "${DNSMASQ_LISTENING}" == "all" ]]; then
|
||||
# Listen on all interfaces, permit all origins
|
||||
add_dnsmasq_setting "except-interface" "nonexisting"
|
||||
elif [[ "${DNSMASQ_LISTENING}" == "local" ]]; then
|
||||
# Listen on all interfaces, but respond only to queries from local subnets
|
||||
add_dnsmasq_setting "local-service"
|
||||
else
|
||||
# Listen only on one interface
|
||||
# Use eth0 as fallback interface if interface is missing in setupVars.conf
|
||||
if [ -z "${PIHOLE_INTERFACE}" ]; then
|
||||
PIHOLE_INTERFACE="eth0"
|
||||
fi
|
||||
if [[ "${DNSMASQ_LISTENING}" == "all" ]]; then
|
||||
# Listen on all interfaces, permit all origins
|
||||
add_dnsmasq_setting "except-interface" "nonexisting"
|
||||
elif [[ "${DNSMASQ_LISTENING}" == "local" ]]; then
|
||||
# Listen on all interfaces, but respond only to queries from local subnets
|
||||
add_dnsmasq_setting "local-service"
|
||||
else
|
||||
# Listen only on one interface
|
||||
# Use eth0 as fallback interface if interface is missing in setupVars.conf
|
||||
if [ -z "${PIHOLE_INTERFACE}" ]; then
|
||||
PIHOLE_INTERFACE="eth0"
|
||||
fi
|
||||
|
||||
add_dnsmasq_setting "interface" "${PIHOLE_INTERFACE}"
|
||||
fi
|
||||
if [[ "${CONDITIONAL_FORWARDING}" == true ]]; then
|
||||
add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_DOMAIN}/${CONDITIONAL_FORWARDING_IP}"
|
||||
add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_REVERSE}/${CONDITIONAL_FORWARDING_IP}"
|
||||
fi
|
||||
add_dnsmasq_setting "interface" "${PIHOLE_INTERFACE}"
|
||||
fi
|
||||
|
||||
if [[ "${CONDITIONAL_FORWARDING}" == true ]]; then
|
||||
add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_DOMAIN}/${CONDITIONAL_FORWARDING_IP}"
|
||||
add_dnsmasq_setting "server=/${CONDITIONAL_FORWARDING_REVERSE}/${CONDITIONAL_FORWARDING_IP}"
|
||||
fi
|
||||
|
||||
# Prevent Firefox from automatically switching over to DNS-over-HTTPS
|
||||
# This follows https://support.mozilla.org/en-US/kb/configuring-networks-disable-dns-over-https
|
||||
# (sourced 7th September 2019)
|
||||
add_dnsmasq_setting "server=/use-application-dns.net/"
|
||||
}
|
||||
|
||||
SetDNSServers() {
|
||||
# Save setting to file
|
||||
delete_setting "PIHOLE_DNS"
|
||||
IFS=',' read -r -a array <<< "${args[2]}"
|
||||
for index in "${!array[@]}"
|
||||
do
|
||||
add_setting "PIHOLE_DNS_$((index+1))" "${array[index]}"
|
||||
done
|
||||
# Save setting to file
|
||||
delete_setting "PIHOLE_DNS"
|
||||
IFS=',' read -r -a array <<< "${args[2]}"
|
||||
for index in "${!array[@]}"
|
||||
do
|
||||
add_setting "PIHOLE_DNS_$((index+1))" "${array[index]}"
|
||||
done
|
||||
|
||||
if [[ "${args[3]}" == "domain-needed" ]]; then
|
||||
change_setting "DNS_FQDN_REQUIRED" "true"
|
||||
else
|
||||
change_setting "DNS_FQDN_REQUIRED" "false"
|
||||
fi
|
||||
if [[ "${args[3]}" == "domain-needed" ]]; then
|
||||
change_setting "DNS_FQDN_REQUIRED" "true"
|
||||
else
|
||||
change_setting "DNS_FQDN_REQUIRED" "false"
|
||||
fi
|
||||
|
||||
if [[ "${args[4]}" == "bogus-priv" ]]; then
|
||||
change_setting "DNS_BOGUS_PRIV" "true"
|
||||
else
|
||||
change_setting "DNS_BOGUS_PRIV" "false"
|
||||
fi
|
||||
if [[ "${args[4]}" == "bogus-priv" ]]; then
|
||||
change_setting "DNS_BOGUS_PRIV" "true"
|
||||
else
|
||||
change_setting "DNS_BOGUS_PRIV" "false"
|
||||
fi
|
||||
|
||||
if [[ "${args[5]}" == "dnssec" ]]; then
|
||||
change_setting "DNSSEC" "true"
|
||||
else
|
||||
change_setting "DNSSEC" "false"
|
||||
fi
|
||||
if [[ "${args[6]}" == "conditional_forwarding" ]]; then
|
||||
change_setting "CONDITIONAL_FORWARDING" "true"
|
||||
change_setting "CONDITIONAL_FORWARDING_IP" "${args[7]}"
|
||||
change_setting "CONDITIONAL_FORWARDING_DOMAIN" "${args[8]}"
|
||||
change_setting "CONDITIONAL_FORWARDING_REVERSE" "${args[9]}"
|
||||
else
|
||||
change_setting "CONDITIONAL_FORWARDING" "false"
|
||||
delete_setting "CONDITIONAL_FORWARDING_IP"
|
||||
delete_setting "CONDITIONAL_FORWARDING_DOMAIN"
|
||||
delete_setting "CONDITIONAL_FORWARDING_REVERSE"
|
||||
fi
|
||||
if [[ "${args[5]}" == "dnssec" ]]; then
|
||||
change_setting "DNSSEC" "true"
|
||||
else
|
||||
change_setting "DNSSEC" "false"
|
||||
fi
|
||||
|
||||
ProcessDNSSettings
|
||||
if [[ "${args[6]}" == "conditional_forwarding" ]]; then
|
||||
change_setting "CONDITIONAL_FORWARDING" "true"
|
||||
change_setting "CONDITIONAL_FORWARDING_IP" "${args[7]}"
|
||||
change_setting "CONDITIONAL_FORWARDING_DOMAIN" "${args[8]}"
|
||||
change_setting "CONDITIONAL_FORWARDING_REVERSE" "${args[9]}"
|
||||
else
|
||||
change_setting "CONDITIONAL_FORWARDING" "false"
|
||||
delete_setting "CONDITIONAL_FORWARDING_IP"
|
||||
delete_setting "CONDITIONAL_FORWARDING_DOMAIN"
|
||||
delete_setting "CONDITIONAL_FORWARDING_REVERSE"
|
||||
fi
|
||||
|
||||
# Restart dnsmasq to load new configuration
|
||||
RestartDNS
|
||||
ProcessDNSSettings
|
||||
|
||||
# Restart dnsmasq to load new configuration
|
||||
RestartDNS
|
||||
}
|
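
# Illustrative mapping performed by SetDNSServers/ProcessDNSSettings above
# (example upstream servers only):
#   setupVars.conf:       PIHOLE_DNS_1=8.8.8.8
#                         PIHOLE_DNS_2=8.8.4.4
#   ${dnsmasqconfig}:     server=8.8.8.8
#                         server=8.8.4.4
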
||||
|
||||
SetExcludeDomains() {
|
||||
change_setting "API_EXCLUDE_DOMAINS" "${args[2]}"
|
||||
change_setting "API_EXCLUDE_DOMAINS" "${args[2]}"
|
||||
}
|
||||
|
||||
SetExcludeClients() {
|
||||
change_setting "API_EXCLUDE_CLIENTS" "${args[2]}"
|
||||
change_setting "API_EXCLUDE_CLIENTS" "${args[2]}"
|
||||
}
|
||||
|
||||
Poweroff(){
|
||||
nohup bash -c "sleep 5; poweroff" &> /dev/null </dev/null &
|
||||
nohup bash -c "sleep 5; poweroff" &> /dev/null </dev/null &
|
||||
}
|
||||
|
||||
Reboot() {
|
||||
nohup bash -c "sleep 5; reboot" &> /dev/null </dev/null &
|
||||
nohup bash -c "sleep 5; reboot" &> /dev/null </dev/null &
|
||||
}
|
||||
|
||||
RestartDNS() {
|
||||
/usr/local/bin/pihole restartdns
|
||||
"${PI_HOLE_BIN_DIR}"/pihole restartdns
|
||||
}
|
||||
|
||||
SetQueryLogOptions() {
|
||||
change_setting "API_QUERY_LOG_SHOW" "${args[2]}"
|
||||
change_setting "API_QUERY_LOG_SHOW" "${args[2]}"
|
||||
}
|
||||
|
||||
ProcessDHCPSettings() {
|
||||
source "${setupVars}"
|
||||
source "${setupVars}"
|
||||
|
||||
if [[ "${DHCP_ACTIVE}" == "true" ]]; then
|
||||
if [[ "${DHCP_ACTIVE}" == "true" ]]; then
|
||||
interface="${PIHOLE_INTERFACE}"
|
||||
|
||||
# Use eth0 as fallback interface
|
||||
if [ -z ${interface} ]; then
|
||||
interface="eth0"
|
||||
interface="eth0"
|
||||
fi
|
||||
|
||||
if [[ "${PIHOLE_DOMAIN}" == "" ]]; then
|
||||
PIHOLE_DOMAIN="lan"
|
||||
change_setting "PIHOLE_DOMAIN" "${PIHOLE_DOMAIN}"
|
||||
PIHOLE_DOMAIN="lan"
|
||||
change_setting "PIHOLE_DOMAIN" "${PIHOLE_DOMAIN}"
|
||||
fi
|
||||
|
||||
if [[ "${DHCP_LEASETIME}" == "0" ]]; then
|
||||
leasetime="infinite"
|
||||
leasetime="infinite"
|
||||
elif [[ "${DHCP_LEASETIME}" == "" ]]; then
|
||||
leasetime="24"
|
||||
change_setting "DHCP_LEASETIME" "${leasetime}"
|
||||
leasetime="24"
|
||||
change_setting "DHCP_LEASETIME" "${leasetime}"
|
||||
elif [[ "${DHCP_LEASETIME}" == "24h" ]]; then
|
||||
#Installation is affected by known bug, introduced in a previous version.
|
||||
#This will automatically clean up setupVars.conf and remove the unnecessary "h"
|
||||
leasetime="24"
|
||||
change_setting "DHCP_LEASETIME" "${leasetime}"
|
||||
#Installation is affected by known bug, introduced in a previous version.
|
||||
#This will automatically clean up setupVars.conf and remove the unnecessary "h"
|
||||
leasetime="24"
|
||||
change_setting "DHCP_LEASETIME" "${leasetime}"
|
||||
else
|
||||
leasetime="${DHCP_LEASETIME}h"
|
||||
leasetime="${DHCP_LEASETIME}h"
|
||||
fi
|
||||
|
||||
# Write settings to file
|
||||
@@ -312,13 +329,20 @@ dhcp-option=option:router,${DHCP_ROUTER}
|
||||
dhcp-leasefile=/etc/pihole/dhcp.leases
|
||||
#quiet-dhcp
|
||||
" > "${dhcpconfig}"
|
||||
chmod 644 "${dhcpconfig}"
|
||||
|
||||
if [[ "${PIHOLE_DOMAIN}" != "none" ]]; then
|
||||
echo "domain=${PIHOLE_DOMAIN}" >> "${dhcpconfig}"
|
||||
fi
|
||||
if [[ "${PIHOLE_DOMAIN}" != "none" ]]; then
|
||||
echo "domain=${PIHOLE_DOMAIN}" >> "${dhcpconfig}"
|
||||
fi
|
||||
|
||||
# Sourced from setupVars
|
||||
# shellcheck disable=SC2154
|
||||
if [[ "${DHCP_rapid_commit}" == "true" ]]; then
|
||||
echo "dhcp-rapid-commit" >> "${dhcpconfig}"
|
||||
fi
|
||||
|
||||
if [[ "${DHCP_IPv6}" == "true" ]]; then
|
||||
echo "#quiet-dhcp6
|
||||
echo "#quiet-dhcp6
|
||||
#enable-ra
|
||||
dhcp-option=option6:dns-server,[::]
|
||||
dhcp-range=::100,::1ff,constructor:${interface},ra-names,slaac,${leasetime}
|
||||
@@ -326,158 +350,169 @@ ra-param=*,0,0
|
||||
" >> "${dhcpconfig}"
|
||||
fi
|
||||
|
||||
else
|
||||
if [[ -f "${dhcpconfig}" ]]; then
|
||||
rm "${dhcpconfig}" &> /dev/null
|
||||
fi
|
||||
fi
|
||||
else
|
||||
if [[ -f "${dhcpconfig}" ]]; then
|
||||
rm "${dhcpconfig}" &> /dev/null
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
EnableDHCP() {
|
||||
change_setting "DHCP_ACTIVE" "true"
|
||||
change_setting "DHCP_START" "${args[2]}"
|
||||
change_setting "DHCP_END" "${args[3]}"
|
||||
change_setting "DHCP_ROUTER" "${args[4]}"
|
||||
change_setting "DHCP_LEASETIME" "${args[5]}"
|
||||
change_setting "PIHOLE_DOMAIN" "${args[6]}"
|
||||
change_setting "DHCP_IPv6" "${args[7]}"
|
||||
change_setting "DHCP_ACTIVE" "true"
|
||||
change_setting "DHCP_START" "${args[2]}"
|
||||
change_setting "DHCP_END" "${args[3]}"
|
||||
change_setting "DHCP_ROUTER" "${args[4]}"
|
||||
change_setting "DHCP_LEASETIME" "${args[5]}"
|
||||
change_setting "PIHOLE_DOMAIN" "${args[6]}"
|
||||
change_setting "DHCP_IPv6" "${args[7]}"
|
||||
change_setting "DHCP_rapid_commit" "${args[8]}"
|
||||
|
||||
# Remove possible old setting from file
|
||||
delete_dnsmasq_setting "dhcp-"
|
||||
delete_dnsmasq_setting "quiet-dhcp"
|
||||
# Remove possible old setting from file
|
||||
delete_dnsmasq_setting "dhcp-"
|
||||
delete_dnsmasq_setting "quiet-dhcp"
|
||||
|
||||
ProcessDHCPSettings
|
||||
# If a DHCP client claims that its name is "wpad", ignore that.
|
||||
# This fixes a security hole. see CERT Vulnerability VU#598349
|
||||
# We also ignore "localhost" as Windows behaves strangely if a
|
||||
# device claims this host name
|
||||
add_dnsmasq_setting "dhcp-name-match=set:hostname-ignore,wpad
|
||||
dhcp-name-match=set:hostname-ignore,localhost
|
||||
dhcp-ignore-names=tag:hostname-ignore"
|
||||
|
||||
RestartDNS
|
||||
ProcessDHCPSettings
|
||||
|
||||
RestartDNS
|
||||
}
|
||||
|
||||
DisableDHCP() {
|
||||
change_setting "DHCP_ACTIVE" "false"
|
||||
change_setting "DHCP_ACTIVE" "false"
|
||||
|
||||
# Remove possible old setting from file
|
||||
delete_dnsmasq_setting "dhcp-"
|
||||
delete_dnsmasq_setting "quiet-dhcp"
|
||||
# Remove possible old setting from file
|
||||
delete_dnsmasq_setting "dhcp-"
|
||||
delete_dnsmasq_setting "quiet-dhcp"
|
||||
|
||||
ProcessDHCPSettings
|
||||
ProcessDHCPSettings
|
||||
|
||||
RestartDNS
|
||||
RestartDNS
|
||||
}
|
||||
|
||||
SetWebUILayout() {
|
||||
change_setting "WEBUIBOXEDLAYOUT" "${args[2]}"
|
||||
change_setting "WEBUIBOXEDLAYOUT" "${args[2]}"
|
||||
}
|
||||
|
||||
CheckUrl(){
|
||||
local regex
|
||||
# Check for characters NOT allowed in URLs
|
||||
regex="[^a-zA-Z0-9:/?&%=~._-]"
|
||||
if [[ "${1}" =~ ${regex} ]]; then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
CustomizeAdLists() {
|
||||
list="/etc/pihole/adlists.list"
|
||||
local address
|
||||
address="${args[3]}"
|
||||
local comment
|
||||
comment="${args[4]}"
|
||||
|
||||
if [[ "${args[2]}" == "enable" ]]; then
|
||||
sed -i "\\@${args[3]}@s/^#http/http/g" "${list}"
|
||||
elif [[ "${args[2]}" == "disable" ]]; then
|
||||
sed -i "\\@${args[3]}@s/^http/#http/g" "${list}"
|
||||
elif [[ "${args[2]}" == "add" ]]; then
|
||||
echo "${args[3]}" >> ${list}
|
||||
elif [[ "${args[2]}" == "del" ]]; then
|
||||
var=$(echo "${args[3]}" | sed 's/\//\\\//g')
|
||||
sed -i "/${var}/Id" "${list}"
|
||||
else
|
||||
echo "Not permitted"
|
||||
return 1
|
||||
fi
|
||||
if CheckUrl "${address}"; then
|
||||
if [[ "${args[2]}" == "enable" ]]; then
|
||||
sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 1 WHERE address = '${address}'"
|
||||
elif [[ "${args[2]}" == "disable" ]]; then
|
||||
sqlite3 "${gravityDBfile}" "UPDATE adlist SET enabled = 0 WHERE address = '${address}'"
|
||||
elif [[ "${args[2]}" == "add" ]]; then
|
||||
sqlite3 "${gravityDBfile}" "INSERT OR IGNORE INTO adlist (address, comment) VALUES ('${address}', '${comment}')"
|
||||
elif [[ "${args[2]}" == "del" ]]; then
|
||||
sqlite3 "${gravityDBfile}" "DELETE FROM adlist WHERE address = '${address}'"
|
||||
else
|
||||
echo "Not permitted"
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
echo "Invalid Url"
|
||||
return 1
|
||||
fi
|
||||
}
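Assuming the usual "pihole -a" wrapper, the adlist actions above would be driven like this (URL and comment are placeholders):
pihole -a adlist add "https://example.com/hosts.txt" "my test list"
pihole -a adlist disable "https://example.com/hosts.txt"
pihole -a adlist del "https://example.com/hosts.txt"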
|
||||
|
||||
SetPrivacyMode() {
|
||||
if [[ "${args[2]}" == "true" ]]; then
|
||||
change_setting "API_PRIVACY_MODE" "true"
|
||||
else
|
||||
change_setting "API_PRIVACY_MODE" "false"
|
||||
fi
|
||||
if [[ "${args[2]}" == "true" ]]; then
|
||||
change_setting "API_PRIVACY_MODE" "true"
|
||||
else
|
||||
change_setting "API_PRIVACY_MODE" "false"
|
||||
fi
|
||||
}
|
||||
|
||||
ResolutionSettings() {
|
||||
typ="${args[2]}"
|
||||
state="${args[3]}"
|
||||
typ="${args[2]}"
|
||||
state="${args[3]}"
|
||||
|
||||
if [[ "${typ}" == "forward" ]]; then
|
||||
change_setting "API_GET_UPSTREAM_DNS_HOSTNAME" "${state}"
|
||||
elif [[ "${typ}" == "clients" ]]; then
|
||||
change_setting "API_GET_CLIENT_HOSTNAME" "${state}"
|
||||
fi
|
||||
if [[ "${typ}" == "forward" ]]; then
|
||||
change_setting "API_GET_UPSTREAM_DNS_HOSTNAME" "${state}"
|
||||
elif [[ "${typ}" == "clients" ]]; then
|
||||
change_setting "API_GET_CLIENT_HOSTNAME" "${state}"
|
||||
fi
|
||||
}
|
||||
|
||||
AddDHCPStaticAddress() {
|
||||
mac="${args[2]}"
|
||||
ip="${args[3]}"
|
||||
host="${args[4]}"
|
||||
mac="${args[2]}"
|
||||
ip="${args[3]}"
|
||||
host="${args[4]}"
|
||||
|
||||
if [[ "${ip}" == "noip" ]]; then
|
||||
# Static host name
|
||||
echo "dhcp-host=${mac},${host}" >> "${dhcpstaticconfig}"
|
||||
elif [[ "${host}" == "nohost" ]]; then
|
||||
# Static IP
|
||||
echo "dhcp-host=${mac},${ip}" >> "${dhcpstaticconfig}"
|
||||
else
|
||||
# Full info given
|
||||
echo "dhcp-host=${mac},${ip},${host}" >> "${dhcpstaticconfig}"
|
||||
fi
|
||||
if [[ "${ip}" == "noip" ]]; then
|
||||
# Static host name
|
||||
echo "dhcp-host=${mac},${host}" >> "${dhcpstaticconfig}"
|
||||
elif [[ "${host}" == "nohost" ]]; then
|
||||
# Static IP
|
||||
echo "dhcp-host=${mac},${ip}" >> "${dhcpstaticconfig}"
|
||||
else
|
||||
# Full info given
|
||||
echo "dhcp-host=${mac},${ip},${host}" >> "${dhcpstaticconfig}"
|
||||
fi
|
||||
}
|
||||
|
||||
RemoveDHCPStaticAddress() {
|
||||
mac="${args[2]}"
|
||||
sed -i "/dhcp-host=${mac}.*/d" "${dhcpstaticconfig}"
|
||||
}
|
||||
|
||||
SetHostRecord() {
|
||||
if [[ "${1}" == "-h" ]] || [[ "${1}" == "--help" ]]; then
|
||||
echo "Usage: pihole -a hostrecord <domain> [IPv4-address],[IPv6-address]
|
||||
Example: 'pihole -a hostrecord home.domain.com 192.168.1.1,2001:db8:a0b:12f0::1'
|
||||
Add a name to the DNS associated to an IPv4/IPv6 address
|
||||
|
||||
Options:
|
||||
\"\" Empty: Remove host record
|
||||
-h, --help Show this help dialog"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ -n "${args[3]}" ]]; then
|
||||
change_setting "HOSTRECORD" "${args[2]},${args[3]}"
|
||||
echo -e " ${TICK} Setting host record for ${args[2]} to ${args[3]}"
|
||||
else
|
||||
change_setting "HOSTRECORD" ""
|
||||
echo -e " ${TICK} Removing host record"
|
||||
fi
|
||||
|
||||
ProcessDNSSettings
|
||||
|
||||
# Restart dnsmasq to load new configuration
|
||||
RestartDNS
|
||||
mac="${args[2]}"
|
||||
sed -i "/dhcp-host=${mac}.*/d" "${dhcpstaticconfig}"
|
||||
}
|
||||
|
||||
SetAdminEmail() {
|
||||
if [[ "${1}" == "-h" ]] || [[ "${1}" == "--help" ]]; then
|
||||
echo "Usage: pihole -a email <address>
|
||||
if [[ "${1}" == "-h" ]] || [[ "${1}" == "--help" ]]; then
|
||||
echo "Usage: pihole -a email <address>
|
||||
Example: 'pihole -a email admin@address.com'
|
||||
Set an administrative contact address for the Block Page
|
||||
|
||||
Options:
|
||||
\"\" Empty: Remove admin contact
|
||||
-h, --help Show this help dialog"
|
||||
exit 0
|
||||
fi
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ -n "${args[2]}" ]]; then
|
||||
change_setting "ADMIN_EMAIL" "${args[2]}"
|
||||
echo -e " ${TICK} Setting admin contact to ${args[2]}"
|
||||
else
|
||||
change_setting "ADMIN_EMAIL" ""
|
||||
echo -e " ${TICK} Removing admin contact"
|
||||
fi
|
||||
if [[ -n "${args[2]}" ]]; then
|
||||
|
||||
# Sanitize email address in case of security issues
|
||||
# Regex from https://stackoverflow.com/a/2138832/4065967
|
||||
local regex
|
||||
regex="^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\$"
|
||||
if [[ ! "${args[2]}" =~ ${regex} ]]; then
|
||||
echo -e " ${CROSS} Invalid email address"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
change_setting "ADMIN_EMAIL" "${args[2]}"
|
||||
echo -e " ${TICK} Setting admin contact to ${args[2]}"
|
||||
else
|
||||
change_setting "ADMIN_EMAIL" ""
|
||||
echo -e " ${TICK} Removing admin contact"
|
||||
fi
|
||||
}
|
||||
|
||||
SetListeningMode() {
|
||||
source "${setupVars}"
|
||||
source "${setupVars}"
|
||||
|
||||
if [[ "$3" == "-h" ]] || [[ "$3" == "--help" ]]; then
|
||||
echo "Usage: pihole -a -i [interface]
|
||||
if [[ "$3" == "-h" ]] || [[ "$3" == "--help" ]]; then
|
||||
echo "Usage: pihole -a -i [interface]
|
||||
Example: 'pihole -a -i local'
|
||||
Specify dnsmasq's network interface listening behavior
|
||||
|
||||
@@ -486,82 +521,143 @@ Interfaces:
|
||||
devices that are at most one hop away (local devices)
|
||||
single Listen only on ${PIHOLE_INTERFACE} interface
|
||||
all Listen on all interfaces, permit all origins"
|
||||
exit 0
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${args[2]}" == "all" ]]; then
|
||||
echo -e " ${INFO} Listening on all interfaces, permiting all origins. Please use a firewall!"
|
||||
change_setting "DNSMASQ_LISTENING" "all"
|
||||
elif [[ "${args[2]}" == "local" ]]; then
|
||||
echo -e " ${INFO} Listening on all interfaces, permiting origins from one hop away (LAN)"
|
||||
change_setting "DNSMASQ_LISTENING" "local"
|
||||
else
|
||||
echo -e " ${INFO} Listening only on interface ${PIHOLE_INTERFACE}"
|
||||
change_setting "DNSMASQ_LISTENING" "single"
|
||||
fi
|
||||
if [[ "${args[2]}" == "all" ]]; then
|
||||
echo -e " ${INFO} Listening on all interfaces, permitting all origins. Please use a firewall!"
|
||||
change_setting "DNSMASQ_LISTENING" "all"
|
||||
elif [[ "${args[2]}" == "local" ]]; then
|
||||
echo -e " ${INFO} Listening on all interfaces, permitting origins from one hop away (LAN)"
|
||||
change_setting "DNSMASQ_LISTENING" "local"
|
||||
else
|
||||
echo -e " ${INFO} Listening only on interface ${PIHOLE_INTERFACE}"
|
||||
change_setting "DNSMASQ_LISTENING" "single"
|
||||
fi
|
||||
|
||||
# Don't restart DNS server yet because other settings
|
||||
# will be applied afterwards if "-web" is set
|
||||
if [[ "${args[3]}" != "-web" ]]; then
|
||||
ProcessDNSSettings
|
||||
# Restart dnsmasq to load new configuration
|
||||
RestartDNS
|
||||
fi
|
||||
# Don't restart DNS server yet because other settings
|
||||
# will be applied afterwards if "-web" is set
|
||||
if [[ "${args[3]}" != "-web" ]]; then
|
||||
ProcessDNSSettings
|
||||
# Restart dnsmasq to load new configuration
|
||||
RestartDNS
|
||||
fi
|
||||
}
|
||||
|
||||
Teleporter() {
|
||||
local datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S")
|
||||
php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-teleporter_${datetimestamp}.zip"
|
||||
local datetimestamp
|
||||
datetimestamp=$(date "+%Y-%m-%d_%H-%M-%S")
|
||||
php /var/www/html/admin/scripts/pi-hole/php/teleporter.php > "pi-hole-teleporter_${datetimestamp}.tar.gz"
|
||||
}
|
||||
|
||||
audit()
|
||||
checkDomain()
|
||||
{
|
||||
echo "${args[2]}" >> /etc/pihole/auditlog.list
|
||||
local domain validDomain
|
||||
# Convert to lowercase
|
||||
domain="${1,,}"
|
||||
validDomain=$(grep -P "^((-|_)*[a-z\\d]((-|_)*[a-z\\d])*(-|_)*)(\\.(-|_)*([a-z\\d]((-|_)*[a-z\\d])*))*$" <<< "${domain}") # Valid chars check
|
||||
validDomain=$(grep -P "^[^\\.]{1,63}(\\.[^\\.]{1,63})*$" <<< "${validDomain}") # Length of each label
|
||||
echo "${validDomain}"
|
||||
}
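A rough sketch of checkDomain in use (hypothetical domains): valid input is echoed back lower-cased, invalid input yields an empty string.
validDomain="$(checkDomain "Sub-Domain.Example.COM")"   # -> "sub-domain.example.com"
if [[ -n "${validDomain}" ]]; then
    echo "accepted: ${validDomain}"
fi
checkDomain "not~a~domain"                              # prints an empty line -> rejected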
|
||||
|
||||
addAudit()
|
||||
{
|
||||
shift # skip "-a"
|
||||
shift # skip "audit"
|
||||
local domains validDomain
|
||||
domains=""
|
||||
for domain in "$@"
|
||||
do
|
||||
# Check domain to be added. Only continue if it is valid
|
||||
validDomain="$(checkDomain "${domain}")"
|
||||
if [[ -n "${validDomain}" ]]; then
|
||||
# Put comma in between domains when there is
|
||||
# more than one domains to be added
|
||||
# SQL INSERT allows adding multiple rows at once using the format
|
||||
## INSERT INTO table (domain) VALUES ('abc.de'),('fgh.ij'),('klm.no'),('pqr.st');
|
||||
if [[ -n "${domains}" ]]; then
|
||||
domains="${domains},"
|
||||
fi
|
||||
domains="${domains}('${domain}')"
|
||||
fi
|
||||
done
|
||||
# Insert only the domain here. The date_added field will be
|
||||
# filled with its default value (date_added = current timestamp)
|
||||
sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ${domains};"
|
||||
}
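For example, auditing two domains in one call builds a single multi-row statement along the lines of (domains are placeholders):
sqlite3 "${gravityDBfile}" "INSERT INTO domain_audit (domain) VALUES ('abc.de'),('fgh.ij');"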
|
||||
|
||||
clearAudit()
|
||||
{
|
||||
sqlite3 "${gravityDBfile}" "DELETE FROM domain_audit;"
|
||||
}
|
||||
|
||||
SetPrivacyLevel() {
|
||||
# Set privacy level. Minimum is 0, maximum is 3
|
||||
if [ "${args[2]}" -ge 0 ] && [ "${args[2]}" -le 3 ]; then
|
||||
changeFTLsetting "PRIVACYLEVEL" "${args[2]}"
|
||||
fi
|
||||
# Set privacy level. Minimum is 0, maximum is 4
|
||||
if [ "${args[2]}" -ge 0 ] && [ "${args[2]}" -le 4 ]; then
|
||||
changeFTLsetting "PRIVACYLEVEL" "${args[2]}"
|
||||
fi
|
||||
}
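Assuming the usual "pihole -a" wrapper, setting the privacy level looks like this; values outside 0-4 are silently ignored by the range check above:
pihole -a privacylevel 3   # accepted, applied via changeFTLsetting
pihole -a -l 7             # outside 0-4, nothing is changed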
|
||||
|
||||
AddCustomDNSAddress() {
|
||||
echo -e " ${TICK} Adding custom DNS entry..."
|
||||
|
||||
ip="${args[2]}"
|
||||
host="${args[3]}"
|
||||
echo "${ip} ${host}" >> "${dnscustomfile}"
|
||||
|
||||
# Restart dnsmasq to load new custom DNS entries
|
||||
RestartDNS
|
||||
}
|
||||
|
||||
RemoveCustomDNSAddress() {
|
||||
echo -e " ${TICK} Removing custom DNS entry..."
|
||||
|
||||
ip="${args[2]}"
|
||||
host="${args[3]}"
|
||||
sed -i "/${ip} ${host}/d" "${dnscustomfile}"
|
||||
|
||||
# Restart dnsmasq to update removed custom DNS entries
|
||||
RestartDNS
|
||||
}
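The custom DNS file used by the two functions above is a plain list of "IP hostname" pairs; a hypothetical session (dnscustomfile path as configured elsewhere, e.g. /etc/pihole/custom.list):
pihole -a addcustomdns 192.168.1.10 nas.lan      # appends "192.168.1.10 nas.lan"
pihole -a removecustomdns 192.168.1.10 nas.lan   # deletes that line again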
|
||||
|
||||
main() {
|
||||
args=("$@")
|
||||
args=("$@")
|
||||
|
||||
case "${args[1]}" in
|
||||
"-p" | "password" ) SetWebPassword;;
|
||||
"-c" | "celsius" ) unit="C"; SetTemperatureUnit;;
|
||||
"-f" | "fahrenheit" ) unit="F"; SetTemperatureUnit;;
|
||||
"-k" | "kelvin" ) unit="K"; SetTemperatureUnit;;
|
||||
"setdns" ) SetDNSServers;;
|
||||
"setexcludedomains" ) SetExcludeDomains;;
|
||||
"setexcludeclients" ) SetExcludeClients;;
|
||||
"poweroff" ) Poweroff;;
|
||||
"reboot" ) Reboot;;
|
||||
"restartdns" ) RestartDNS;;
|
||||
"setquerylog" ) SetQueryLogOptions;;
|
||||
"enabledhcp" ) EnableDHCP;;
|
||||
"disabledhcp" ) DisableDHCP;;
|
||||
"layout" ) SetWebUILayout;;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
"privacymode" ) SetPrivacyMode;;
|
||||
"resolve" ) ResolutionSettings;;
|
||||
"addstaticdhcp" ) AddDHCPStaticAddress;;
|
||||
"removestaticdhcp" ) RemoveDHCPStaticAddress;;
|
||||
"-r" | "hostrecord" ) SetHostRecord "$3";;
|
||||
"-e" | "email" ) SetAdminEmail "$3";;
|
||||
"-i" | "interface" ) SetListeningMode "$@";;
|
||||
"-t" | "teleporter" ) Teleporter;;
|
||||
"adlist" ) CustomizeAdLists;;
|
||||
"audit" ) audit;;
|
||||
"-l" | "privacylevel" ) SetPrivacyLevel;;
|
||||
* ) helpFunc;;
|
||||
esac
|
||||
case "${args[1]}" in
|
||||
"-p" | "password" ) SetWebPassword;;
|
||||
"-c" | "celsius" ) unit="C"; SetTemperatureUnit;;
|
||||
"-f" | "fahrenheit" ) unit="F"; SetTemperatureUnit;;
|
||||
"-k" | "kelvin" ) unit="K"; SetTemperatureUnit;;
|
||||
"setdns" ) SetDNSServers;;
|
||||
"setexcludedomains" ) SetExcludeDomains;;
|
||||
"setexcludeclients" ) SetExcludeClients;;
|
||||
"poweroff" ) Poweroff;;
|
||||
"reboot" ) Reboot;;
|
||||
"restartdns" ) RestartDNS;;
|
||||
"setquerylog" ) SetQueryLogOptions;;
|
||||
"enabledhcp" ) EnableDHCP;;
|
||||
"disabledhcp" ) DisableDHCP;;
|
||||
"layout" ) SetWebUILayout;;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
"privacymode" ) SetPrivacyMode;;
|
||||
"resolve" ) ResolutionSettings;;
|
||||
"addstaticdhcp" ) AddDHCPStaticAddress;;
|
||||
"removestaticdhcp" ) RemoveDHCPStaticAddress;;
|
||||
"-e" | "email" ) SetAdminEmail "$3";;
|
||||
"-i" | "interface" ) SetListeningMode "$@";;
|
||||
"-t" | "teleporter" ) Teleporter;;
|
||||
"adlist" ) CustomizeAdLists;;
|
||||
"audit" ) addAudit "$@";;
|
||||
"clearaudit" ) clearAudit;;
|
||||
"-l" | "privacylevel" ) SetPrivacyLevel;;
|
||||
"addcustomdns" ) AddCustomDNSAddress;;
|
||||
"removecustomdns" ) RemoveCustomDNSAddress;;
|
||||
* ) helpFunc;;
|
||||
esac
|
||||
|
||||
shift
|
||||
shift
|
||||
|
||||
if [[ $# = 0 ]]; then
|
||||
helpFunc
|
||||
fi
|
||||
if [[ $# = 0 ]]; then
|
||||
helpFunc
|
||||
fi
|
||||
}
|
||||
|
28
advanced/Scripts/wildcard_regex_converter.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# Pi-hole: A black hole for Internet advertisements
|
||||
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
|
||||
# Network-wide ad blocking via your own hardware.
|
||||
#
|
||||
# Provides an automated migration subroutine to convert Pi-hole v3.x wildcard domains to Pi-hole v4.x regex filters
|
||||
#
|
||||
# This file is copyright under the latest version of the EUPL.
|
||||
# Please see LICENSE file for your rights under this license.
|
||||
|
||||
# regexFile set in gravity.sh
|
||||
|
||||
wildcardFile="/etc/dnsmasq.d/03-pihole-wildcard.conf"
|
||||
|
||||
convert_wildcard_to_regex() {
|
||||
if [ ! -f "${wildcardFile}" ]; then
|
||||
return
|
||||
fi
|
||||
local addrlines domains uniquedomains
|
||||
# Obtain wildcard domains from old file
|
||||
addrlines="$(grep -oE "/.*/" ${wildcardFile})"
|
||||
# Strip "/" from domain names and convert "." to regex-compatible "\."
|
||||
domains="$(sed 's/\///g;s/\./\\./g' <<< "${addrlines}")"
|
||||
# Remove repeated domains (may have been inserted two times due to A and AAAA blocking)
|
||||
uniquedomains="$(uniq <<< "${domains}")"
|
||||
# Automatically generate regex filters and remove old wildcards file
|
||||
awk '{print "(^|\\.)"$0"$"}' <<< "${uniquedomains}" >> "${regexFile:?}" && rm "${wildcardFile}"
|
||||
}
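As an illustration of the transformation (hypothetical domain), the sed/awk pipeline above turns a legacy wildcard entry into an anchored regex:
sed 's/\///g;s/\./\\./g' <<< "/example.com/" | awk '{print "(^|\\.)"$0"$"}'
# prints: (^|\.)example\.com$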
|
188
advanced/Templates/gravity.db.sql
Normal file
@@ -0,0 +1,188 @@
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
CREATE TABLE "group"
|
||||
(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
enabled BOOLEAN NOT NULL DEFAULT 1,
|
||||
name TEXT UNIQUE NOT NULL,
|
||||
date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
description TEXT
|
||||
);
|
||||
INSERT INTO "group" (id,enabled,name,description) VALUES (0,1,'Default','The default group');
|
||||
|
||||
CREATE TABLE domainlist
|
||||
(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
type INTEGER NOT NULL DEFAULT 0,
|
||||
domain TEXT UNIQUE NOT NULL,
|
||||
enabled BOOLEAN NOT NULL DEFAULT 1,
|
||||
date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
comment TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE adlist
|
||||
(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
address TEXT UNIQUE NOT NULL,
|
||||
enabled BOOLEAN NOT NULL DEFAULT 1,
|
||||
date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
comment TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE adlist_by_group
|
||||
(
|
||||
adlist_id INTEGER NOT NULL REFERENCES adlist (id),
|
||||
group_id INTEGER NOT NULL REFERENCES "group" (id),
|
||||
PRIMARY KEY (adlist_id, group_id)
|
||||
);
|
||||
|
||||
CREATE TABLE gravity
|
||||
(
|
||||
domain TEXT NOT NULL,
|
||||
adlist_id INTEGER NOT NULL REFERENCES adlist (id)
|
||||
);
|
||||
|
||||
CREATE TABLE info
|
||||
(
|
||||
property TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO "info" VALUES('version','12');
|
||||
|
||||
CREATE TABLE domain_audit
|
||||
(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
domain TEXT UNIQUE NOT NULL,
|
||||
date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int))
|
||||
);
|
||||
|
||||
CREATE TABLE domainlist_by_group
|
||||
(
|
||||
domainlist_id INTEGER NOT NULL REFERENCES domainlist (id),
|
||||
group_id INTEGER NOT NULL REFERENCES "group" (id),
|
||||
PRIMARY KEY (domainlist_id, group_id)
|
||||
);
|
||||
|
||||
CREATE TABLE client
|
||||
(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
ip TEXT NOT NULL UNIQUE,
|
||||
date_added INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
date_modified INTEGER NOT NULL DEFAULT (cast(strftime('%s', 'now') as int)),
|
||||
comment TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE client_by_group
|
||||
(
|
||||
client_id INTEGER NOT NULL REFERENCES client (id),
|
||||
group_id INTEGER NOT NULL REFERENCES "group" (id),
|
||||
PRIMARY KEY (client_id, group_id)
|
||||
);
|
||||
|
||||
CREATE TRIGGER tr_adlist_update AFTER UPDATE ON adlist
|
||||
BEGIN
|
||||
UPDATE adlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE address = NEW.address;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_client_update AFTER UPDATE ON client
|
||||
BEGIN
|
||||
UPDATE client SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE ip = NEW.ip;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_domainlist_update AFTER UPDATE ON domainlist
|
||||
BEGIN
|
||||
UPDATE domainlist SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE domain = NEW.domain;
|
||||
END;
|
||||
|
||||
CREATE VIEW vw_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
|
||||
FROM domainlist
|
||||
LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
|
||||
LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
|
||||
WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
|
||||
AND domainlist.type = 0
|
||||
ORDER BY domainlist.id;
|
||||
|
||||
CREATE VIEW vw_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
|
||||
FROM domainlist
|
||||
LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
|
||||
LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
|
||||
WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
|
||||
AND domainlist.type = 1
|
||||
ORDER BY domainlist.id;
|
||||
|
||||
CREATE VIEW vw_regex_whitelist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
|
||||
FROM domainlist
|
||||
LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
|
||||
LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
|
||||
WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
|
||||
AND domainlist.type = 2
|
||||
ORDER BY domainlist.id;
|
||||
|
||||
CREATE VIEW vw_regex_blacklist AS SELECT domain, domainlist.id AS id, domainlist_by_group.group_id AS group_id
|
||||
FROM domainlist
|
||||
LEFT JOIN domainlist_by_group ON domainlist_by_group.domainlist_id = domainlist.id
|
||||
LEFT JOIN "group" ON "group".id = domainlist_by_group.group_id
|
||||
WHERE domainlist.enabled = 1 AND (domainlist_by_group.group_id IS NULL OR "group".enabled = 1)
|
||||
AND domainlist.type = 3
|
||||
ORDER BY domainlist.id;
|
||||
|
||||
CREATE VIEW vw_gravity AS SELECT domain, adlist_by_group.group_id AS group_id
|
||||
FROM gravity
|
||||
LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = gravity.adlist_id
|
||||
LEFT JOIN adlist ON adlist.id = gravity.adlist_id
|
||||
LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
|
||||
WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1);
|
||||
|
||||
CREATE VIEW vw_adlist AS SELECT DISTINCT address, adlist.id AS id
|
||||
FROM adlist
|
||||
LEFT JOIN adlist_by_group ON adlist_by_group.adlist_id = adlist.id
|
||||
LEFT JOIN "group" ON "group".id = adlist_by_group.group_id
|
||||
WHERE adlist.enabled = 1 AND (adlist_by_group.group_id IS NULL OR "group".enabled = 1)
|
||||
ORDER BY adlist.id;
|
||||
|
||||
CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
|
||||
BEGIN
|
||||
INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_client_add AFTER INSERT ON client
|
||||
BEGIN
|
||||
INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
|
||||
BEGIN
|
||||
INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_group_update AFTER UPDATE ON "group"
|
||||
BEGIN
|
||||
UPDATE "group" SET date_modified = (cast(strftime('%s', 'now') as int)) WHERE id = NEW.id;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_group_zero AFTER DELETE ON "group"
|
||||
BEGIN
|
||||
INSERT OR IGNORE INTO "group" (id,enabled,name) VALUES (0,1,'Default');
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_domainlist_delete AFTER DELETE ON domainlist
|
||||
BEGIN
|
||||
DELETE FROM domainlist_by_group WHERE domainlist_id = OLD.id;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_adlist_delete AFTER DELETE ON adlist
|
||||
BEGIN
|
||||
DELETE FROM adlist_by_group WHERE adlist_id = OLD.id;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_client_delete AFTER DELETE ON client
|
||||
BEGIN
|
||||
DELETE FROM client_by_group WHERE client_id = OLD.id;
|
||||
END;
|
||||
|
||||
COMMIT;
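A short sketch of working with this schema from the shell (domain is a placeholder; the database path matches the gravityDBfile default used elsewhere in this diff):
sqlite3 /etc/pihole/gravity.db "INSERT INTO domainlist (type, domain, comment) VALUES (0, 'allowed.example.com', 'test entry');"
# tr_domainlist_add links the new row to the default group (id 0), so it shows up in the group-aware view:
sqlite3 /etc/pihole/gravity.db "SELECT domain FROM vw_whitelist;"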
|
42
advanced/Templates/gravity_copy.sql
Normal file
@@ -0,0 +1,42 @@
|
||||
.timeout 30000
|
||||
|
||||
ATTACH DATABASE '/etc/pihole/gravity.db' AS OLD;
|
||||
|
||||
BEGIN TRANSACTION;
|
||||
|
||||
DROP TRIGGER tr_domainlist_add;
|
||||
DROP TRIGGER tr_client_add;
|
||||
DROP TRIGGER tr_adlist_add;
|
||||
|
||||
INSERT OR REPLACE INTO "group" SELECT * FROM OLD."group";
|
||||
INSERT OR REPLACE INTO domain_audit SELECT * FROM OLD.domain_audit;
|
||||
|
||||
INSERT OR REPLACE INTO domainlist SELECT * FROM OLD.domainlist;
|
||||
INSERT OR REPLACE INTO domainlist_by_group SELECT * FROM OLD.domainlist_by_group;
|
||||
|
||||
INSERT OR REPLACE INTO adlist SELECT * FROM OLD.adlist;
|
||||
INSERT OR REPLACE INTO adlist_by_group SELECT * FROM OLD.adlist_by_group;
|
||||
|
||||
INSERT OR REPLACE INTO info SELECT * FROM OLD.info;
|
||||
|
||||
INSERT OR REPLACE INTO client SELECT * FROM OLD.client;
|
||||
INSERT OR REPLACE INTO client_by_group SELECT * FROM OLD.client_by_group;
|
||||
|
||||
|
||||
CREATE TRIGGER tr_domainlist_add AFTER INSERT ON domainlist
|
||||
BEGIN
|
||||
INSERT INTO domainlist_by_group (domainlist_id, group_id) VALUES (NEW.id, 0);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_client_add AFTER INSERT ON client
|
||||
BEGIN
|
||||
INSERT INTO client_by_group (client_id, group_id) VALUES (NEW.id, 0);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER tr_adlist_add AFTER INSERT ON adlist
|
||||
BEGIN
|
||||
INSERT INTO adlist_by_group (adlist_id, group_id) VALUES (NEW.id, 0);
|
||||
END;
|
||||
|
||||
|
||||
COMMIT;
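gravity.sh (further down in this diff) appears to feed this script into a freshly created temporary database to migrate the existing data; a rough sketch of that step, using the variable names gravity.sh defines:
sqlite3 "${gravityTEMPfile}" < "${gravityDBcopy}"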
|
119
advanced/Templates/pihole-FTL.service
Normal file
@@ -0,0 +1,119 @@
|
||||
#!/usr/bin/env bash
|
||||
### BEGIN INIT INFO
|
||||
# Provides: pihole-FTL
|
||||
# Required-Start: $remote_fs $syslog
|
||||
# Required-Stop: $remote_fs $syslog
|
||||
# Default-Start: 2 3 4 5
|
||||
# Default-Stop: 0 1 6
|
||||
# Short-Description: pihole-FTL daemon
|
||||
# Description: Enable service provided by pihole-FTL daemon
|
||||
### END INIT INFO
|
||||
|
||||
FTLUSER=pihole
|
||||
PIDFILE=/var/run/pihole-FTL.pid
|
||||
|
||||
get_pid() {
|
||||
# First, try to obtain PID from PIDFILE
|
||||
if [ -s "${PIDFILE}" ]; then
|
||||
cat "${PIDFILE}"
|
||||
return
|
||||
fi
|
||||
|
||||
# If the PIDFILE is empty or not available, obtain the PID using pidof
|
||||
pidof "pihole-FTL" | awk '{print $(NF)}'
|
||||
}
|
||||
|
||||
is_running() {
|
||||
ps "$(get_pid)" > /dev/null 2>&1
|
||||
}
|
||||
|
||||
|
||||
# Start the service
|
||||
start() {
|
||||
if is_running; then
|
||||
echo "pihole-FTL is already running"
|
||||
else
|
||||
# Touch files to ensure they exist (create if non-existing, preserve if existing)
|
||||
touch /var/log/pihole-FTL.log /var/log/pihole.log
|
||||
touch /run/pihole-FTL.pid /run/pihole-FTL.port
|
||||
touch /etc/pihole/dhcp.leases
|
||||
mkdir -p /var/run/pihole
|
||||
mkdir -p /var/log/pihole
|
||||
chown pihole:pihole /var/run/pihole /var/log/pihole
|
||||
# Remove possible leftovers from previous pihole-FTL processes
|
||||
rm -f /dev/shm/FTL-* 2> /dev/null
|
||||
rm /var/run/pihole/FTL.sock 2> /dev/null
|
||||
# Ensure that permissions are set so that pihole-FTL can edit all necessary files
|
||||
chown pihole:pihole /run/pihole-FTL.pid /run/pihole-FTL.port
|
||||
chown pihole:pihole /etc/pihole /etc/pihole/dhcp.leases 2> /dev/null
|
||||
chown pihole:pihole /var/log/pihole-FTL.log /var/log/pihole.log
|
||||
chmod 0644 /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port /var/log/pihole.log
|
||||
# Chown database files to the user FTL runs as. We ignore errors as the files may not (yet) exist
|
||||
chown pihole:pihole /etc/pihole/pihole-FTL.db /etc/pihole/gravity.db 2> /dev/null
|
||||
if setcap CAP_NET_BIND_SERVICE,CAP_NET_RAW,CAP_NET_ADMIN+eip "$(which pihole-FTL)"; then
|
||||
su -s /bin/sh -c "/usr/bin/pihole-FTL" "$FTLUSER"
|
||||
else
|
||||
echo "Warning: Starting pihole-FTL as root because setting capabilities is not supported on this system"
|
||||
pihole-FTL
|
||||
fi
|
||||
echo
|
||||
fi
|
||||
}
|
||||
|
||||
# Stop the service
|
||||
stop() {
|
||||
if is_running; then
|
||||
kill "$(get_pid)"
|
||||
for i in {1..5}; do
|
||||
if ! is_running; then
|
||||
break
|
||||
fi
|
||||
|
||||
echo -n "."
|
||||
sleep 1
|
||||
done
|
||||
echo
|
||||
|
||||
if is_running; then
|
||||
echo "Not stopped; may still be shutting down or shutdown may have failed, killing now"
|
||||
kill -9 "$(get_pid)"
|
||||
exit 1
|
||||
else
|
||||
echo "Stopped"
|
||||
fi
|
||||
else
|
||||
echo "Not running"
|
||||
fi
|
||||
echo
|
||||
}
|
||||
|
||||
# Indicate the service status
|
||||
status() {
|
||||
if is_running; then
|
||||
echo "[ ok ] pihole-FTL is running"
|
||||
exit 0
|
||||
else
|
||||
echo "[ ] pihole-FTL is not running"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
### main logic ###
|
||||
case "$1" in
|
||||
stop)
|
||||
stop
|
||||
;;
|
||||
status)
|
||||
status
|
||||
;;
|
||||
start|restart|reload|condrestart)
|
||||
stop
|
||||
start
|
||||
;;
|
||||
*)
|
||||
echo $"Usage: $0 {start|stop|restart|reload|status}"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
exit 0
|
@@ -16,7 +16,9 @@
|
||||
|
||||
# Pi-hole: Update the ad sources once a week on Sunday at a random time in the
|
||||
# early morning. Download any updates from the adlists
|
||||
59 1 * * 7 root PATH="$PATH:/usr/local/bin/" pihole updateGravity
|
||||
# Squash output to log, then splat the log to stdout on error to allow for
|
||||
# standard crontab job error handling.
|
||||
59 1 * * 7 root PATH="$PATH:/usr/local/bin/" pihole updateGravity >/var/log/pihole_updateGravity.log || cat /var/log/pihole_updateGravity.log
|
||||
|
||||
# Pi-hole: Flush the log daily at 00:00
|
||||
# The flush script will use logrotate if available
|
@@ -1,11 +1,79 @@
|
||||
_pihole() {
|
||||
local cur prev opts
|
||||
local cur prev opts opts_admin opts_checkout opts_chronometer opts_debug opts_interface opts_logging opts_privacy opts_query opts_update opts_version
|
||||
COMPREPLY=()
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
opts="admin blacklist chronometer debug disable enable flush help logging query reconfigure restartdns setupLCD status tail uninstall updateGravity updatePihole version whitelist checkout"
|
||||
prev2="${COMP_WORDS[COMP_CWORD-2]}"
|
||||
|
||||
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
|
||||
case "${prev}" in
|
||||
"pihole")
|
||||
opts="admin blacklist checkout chronometer debug disable enable flush help logging query reconfigure regex restartdns status tail uninstall updateGravity updatePihole version wildcard whitelist arpflush"
|
||||
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
|
||||
;;
|
||||
"whitelist"|"blacklist"|"wildcard"|"regex")
|
||||
opts_lists="\--delmode \--noreload \--quiet \--list \--nuke"
|
||||
COMPREPLY=( $(compgen -W "${opts_lists}" -- ${cur}) )
|
||||
;;
|
||||
"admin")
|
||||
opts_admin="celsius email fahrenheit interface kelvin password privacylevel"
|
||||
COMPREPLY=( $(compgen -W "${opts_admin}" -- ${cur}) )
|
||||
;;
|
||||
"checkout")
|
||||
opts_checkout="core ftl web master dev"
|
||||
COMPREPLY=( $(compgen -W "${opts_checkout}" -- ${cur}) )
|
||||
;;
|
||||
"chronometer")
|
||||
opts_chronometer="\--exit \--json \--refresh"
|
||||
COMPREPLY=( $(compgen -W "${opts_chronometer}" -- ${cur}) )
|
||||
;;
|
||||
"debug")
|
||||
opts_debug="-a"
|
||||
COMPREPLY=( $(compgen -W "${opts_debug}" -- ${cur}) )
|
||||
;;
|
||||
"logging")
|
||||
opts_logging="on off 'off noflush'"
|
||||
COMPREPLY=( $(compgen -W "${opts_logging}" -- ${cur}) )
|
||||
;;
|
||||
"query")
|
||||
opts_query="-adlist -all -exact"
|
||||
COMPREPLY=( $(compgen -W "${opts_query}" -- ${cur}) )
|
||||
;;
|
||||
"updatePihole"|"-up")
|
||||
opts_update="--check-only"
|
||||
COMPREPLY=( $(compgen -W "${opts_update}" -- ${cur}) )
|
||||
;;
|
||||
"version")
|
||||
opts_version="\--admin \--current \--ftl \--hash \--latest \--pihole"
|
||||
COMPREPLY=( $(compgen -W "${opts_version}" -- ${cur}) )
|
||||
;;
|
||||
"interface")
|
||||
if ( [[ "$prev2" == "admin" ]] || [[ "$prev2" == "-a" ]] ); then
|
||||
opts_interface="$(cat /proc/net/dev | cut -d: -s -f1)"
|
||||
COMPREPLY=( $(compgen -W "${opts_interface}" -- ${cur}) )
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
;;
|
||||
"privacylevel")
|
||||
if ( [[ "$prev2" == "admin" ]] || [[ "$prev2" == "-a" ]] ); then
|
||||
opts_privacy="0 1 2 3 4"
|
||||
COMPREPLY=( $(compgen -W "${opts_privacy}" -- ${cur}) )
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
;;
|
||||
"core"|"admin"|"ftl")
|
||||
if [[ "$prev2" == "checkout" ]]; then
|
||||
opts_checkout="master dev"
|
||||
COMPREPLY=( $(compgen -W "${opts_checkout}" -- ${cur}) )
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
return 0
|
||||
}
|
||||
complete -F _pihole pihole
|
||||
|
@@ -6,8 +6,10 @@
|
||||
* This file is copyright under the latest version of the EUPL.
|
||||
* Please see LICENSE file for your rights under this license. */
|
||||
|
||||
// Sanitise HTTP_HOST output
|
||||
$serverName = htmlspecialchars($_SERVER["HTTP_HOST"]);
|
||||
// Sanitize SERVER_NAME output
|
||||
$serverName = htmlspecialchars($_SERVER["SERVER_NAME"]);
|
||||
// Remove external ipv6 brackets if any
|
||||
$serverName = preg_replace('/^\[(.*)\]$/', '${1}', $serverName);
|
||||
|
||||
if (!is_file("/etc/pihole/setupVars.conf"))
|
||||
die("[ERROR] File not found: <code>/etc/pihole/setupVars.conf</code>");
|
||||
@@ -38,13 +40,6 @@ $validExtTypes = array("asp", "htm", "html", "php", "rss", "xml", "");
|
||||
// Get extension of current URL
|
||||
$currentUrlExt = pathinfo($_SERVER["REQUEST_URI"], PATHINFO_EXTENSION);
|
||||
|
||||
// Check if this is served over HTTP or HTTPS
|
||||
if(isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] == "on") {
|
||||
$proto = "https";
|
||||
} else {
|
||||
$proto = "http";
|
||||
}
|
||||
|
||||
// Set mobile friendly viewport
|
||||
$viewPort = '<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"/>';
|
||||
|
||||
@@ -55,16 +50,24 @@ function setHeader($type = "x") {
|
||||
}
|
||||
|
||||
// Determine block page type
|
||||
if ($serverName === "pi.hole") {
|
||||
if ($serverName === "pi.hole"
|
||||
|| (!empty($_SERVER["VIRTUAL_HOST"]) && $serverName === $_SERVER["VIRTUAL_HOST"])) {
|
||||
// Redirect to Web Interface
|
||||
exit(header("Location: /admin"));
|
||||
} elseif (filter_var($serverName, FILTER_VALIDATE_IP) || in_array($serverName, $authorizedHosts)) {
|
||||
// Set Splash Page output
|
||||
$splashPage = "
|
||||
<html><head>
|
||||
<html>
|
||||
<head>
|
||||
$viewPort
|
||||
<link rel='stylesheet' href='/pihole/blockingpage.css' type='text/css'/>
|
||||
</head><body id='splashpage'><img src='/admin/img/logo.svg'/><br/>Pi-<b>hole</b>: Your black hole for Internet advertisements<br><a href='/admin'>Did you mean to go to the admin panel?</a></body></html>
|
||||
<link rel='stylesheet' href='pihole/blockingpage.css' type='text/css'/>
|
||||
</head>
|
||||
<body id='splashpage'>
|
||||
<img src='admin/img/logo.svg'/><br/>
|
||||
Pi-<b>hole</b>: Your black hole for Internet advertisements<br/>
|
||||
<a href='/admin'>Did you mean to go to the admin panel?</a>
|
||||
</body>
|
||||
</html>
|
||||
";
|
||||
|
||||
// Set splash/landing page based off presence of $landPage
|
||||
@@ -73,7 +76,7 @@ if ($serverName === "pi.hole") {
|
||||
// Unset variables so as to not be included in $landPage
|
||||
unset($serverName, $svPasswd, $svEmail, $authorizedHosts, $validExtTypes, $currentUrlExt, $viewPort);
|
||||
|
||||
// Render splash/landing page when directly browsing via IP or authorised hostname
|
||||
// Render splash/landing page when directly browsing via IP or authorized hostname
|
||||
exit($renderPage);
|
||||
} elseif ($currentUrlExt === "js") {
|
||||
// Serve Pi-hole Javascript for blocked domains requesting JS
|
||||
@@ -101,24 +104,30 @@ if ($serverName === "pi.hole") {
|
||||
// Define admin email address text based off $svEmail presence
|
||||
$bpAskAdmin = !empty($svEmail) ? '<a href="mailto:'.$svEmail.'?subject=Site Blocked: '.$serverName.'"></a>' : "<span/>";
|
||||
|
||||
// Determine if at least one block list has been generated
|
||||
if (empty(glob("/etc/pihole/list.0.*.domains")))
|
||||
die("[ERROR] There are no domain lists generated lists within <code>/etc/pihole/</code>! Please update gravity by running <code>pihole -g</code>, or repair Pi-hole using <code>pihole -r</code>.");
|
||||
|
||||
// Set location of adlists file
|
||||
if (is_file("/etc/pihole/adlists.list")) {
|
||||
$adLists = "/etc/pihole/adlists.list";
|
||||
} elseif (is_file("/etc/pihole/adlists.default")) {
|
||||
$adLists = "/etc/pihole/adlists.default";
|
||||
// Get possible non-standard location of FTL's database
|
||||
$FTLsettings = parse_ini_file("/etc/pihole/pihole-FTL.conf");
|
||||
if (isset($FTLsettings["GRAVITYDB"])) {
|
||||
$gravityDBFile = $FTLsettings["GRAVITYDB"];
|
||||
} else {
|
||||
die("[ERROR] File not found: <code>/etc/pihole/adlists.list</code>");
|
||||
$gravityDBFile = "/etc/pihole/gravity.db";
|
||||
}
|
||||
|
||||
// Get all URLs starting with "http" or "www" from adlists and re-index array numerically
|
||||
$adlistsUrls = array_values(preg_grep("/(^http)|(^www)/i", file($adLists, FILE_IGNORE_NEW_LINES)));
|
||||
// Connect to gravity.db
|
||||
try {
|
||||
$db = new SQLite3($gravityDBFile, SQLITE3_OPEN_READONLY);
|
||||
} catch (Exception $exception) {
|
||||
die("[ERROR]: Failed to connect to gravity.db");
|
||||
}
|
||||
|
||||
// Get all adlist addresses
|
||||
$adlistResults = $db->query("SELECT address FROM vw_adlist");
|
||||
$adlistsUrls = array();
|
||||
while ($row = $adlistResults->fetchArray()) {
|
||||
array_push($adlistsUrls, $row[0]);
|
||||
}
|
||||
|
||||
if (empty($adlistsUrls))
|
||||
die("[ERROR]: There are no adlist URL's found within <code>$adLists</code>");
|
||||
die("[ERROR]: There are no adlists enabled");
|
||||
|
||||
// Get total number of blocklists (Including Whitelist, Blacklist & Wildcard lists)
|
||||
$adlistsCount = count($adlistsUrls) + 3;
|
||||
@@ -130,7 +139,12 @@ ini_set("default_socket_timeout", 3);
|
||||
function queryAds($serverName) {
|
||||
// Determine the time it takes while querying adlists
|
||||
$preQueryTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"];
|
||||
$queryAds = file("http://127.0.0.1/admin/scripts/pi-hole/php/queryads.php?domain=$serverName&bp", FILE_IGNORE_NEW_LINES);
|
||||
$queryAdsURL = sprintf(
|
||||
"http://127.0.0.1:%s/admin/scripts/pi-hole/php/queryads.php?domain=%s&bp",
|
||||
$_SERVER["SERVER_PORT"],
|
||||
$serverName
|
||||
);
|
||||
$queryAds = file($queryAdsURL, FILE_IGNORE_NEW_LINES);
|
||||
$queryAds = array_values(array_filter(preg_replace("/data:\s+/", "", $queryAds)));
|
||||
$queryTime = sprintf("%.0f", (microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"]) - $preQueryTime);
|
||||
|
||||
@@ -208,7 +222,7 @@ $phVersion = exec("cd /etc/.pihole/ && git describe --long --tags");
|
||||
if (explode("-", $phVersion)[1] != "0")
|
||||
$execTime = microtime(true)-$_SERVER["REQUEST_TIME_FLOAT"];
|
||||
|
||||
// Please Note: Text is added via CSS to allow an admin to provide a localised
|
||||
// Please Note: Text is added via CSS to allow an admin to provide a localized
|
||||
// language without the need to edit this file
|
||||
|
||||
setHeader();
|
||||
@@ -225,10 +239,10 @@ setHeader();
|
||||
<?=$viewPort ?>
|
||||
<meta name="robots" content="noindex,nofollow"/>
|
||||
<meta http-equiv="x-dns-prefetch-control" content="off">
|
||||
<link rel="shortcut icon" href="<?=$proto ?>://pi.hole/admin/img/favicon.png" type="image/x-icon"/>
|
||||
<link rel="stylesheet" href="<?=$proto ?>://pi.hole/pihole/blockingpage.css" type="text/css"/>
|
||||
<link rel="shortcut icon" href="admin/img/favicon.png" type="image/x-icon"/>
|
||||
<link rel="stylesheet" href="pihole/blockingpage.css" type="text/css"/>
|
||||
<title>● <?=$serverName ?></title>
|
||||
<script src="<?=$proto ?>://pi.hole/admin/scripts/vendor/jquery.min.js"></script>
|
||||
<script src="admin/scripts/vendor/jquery.min.js"></script>
|
||||
<script>
|
||||
window.onload = function () {
|
||||
<?php
|
||||
@@ -327,6 +341,7 @@ setHeader();
|
||||
setTimeout(function(){window.location.reload(1);}, 10000);
|
||||
$("#bpOutput").removeClass("add");
|
||||
$("#bpOutput").addClass("success");
|
||||
$("#bpOutput").html("");
|
||||
} else {
|
||||
$("#bpOutput").removeClass("add");
|
||||
$("#bpOutput").addClass("error");
|
||||
@@ -336,6 +351,7 @@ setHeader();
|
||||
error: function(jqXHR, exception) {
|
||||
$("#bpOutput").removeClass("add");
|
||||
$("#bpOutput").addClass("exception");
|
||||
$("#bpOutput").html("");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@@ -27,7 +27,7 @@ server.modules = (
|
||||
)
|
||||
|
||||
server.document-root = "/var/www/html"
|
||||
server.error-handler-404 = "pihole/index.php"
|
||||
server.error-handler-404 = "/pihole/index.php"
|
||||
server.upload-dirs = ( "/var/cache/lighttpd/uploads" )
|
||||
server.errorlog = "/var/log/lighttpd/error.log"
|
||||
server.pid-file = "/var/run/lighttpd.pid"
|
||||
@@ -44,9 +44,18 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
|
||||
compress.cache-dir = "/var/cache/lighttpd/compress/"
|
||||
compress.filetype = ( "application/javascript", "text/css", "text/html", "text/plain" )
|
||||
|
||||
mimetype.assign = ( ".png" => "image/png",
|
||||
".jpg" => "image/jpeg",
|
||||
".jpeg" => "image/jpeg",
|
||||
".html" => "text/html",
|
||||
".css" => "text/css; charset=utf-8",
|
||||
".js" => "application/javascript",
|
||||
".json" => "application/json",
|
||||
".txt" => "text/plain",
|
||||
".svg" => "image/svg+xml" )
|
||||
|
||||
# default listening port for IPv6 falls back to the IPv4 port
|
||||
include_shell "/usr/share/lighttpd/use-ipv6.pl " + server.port
|
||||
include_shell "/usr/share/lighttpd/create-mime.assign.pl"
|
||||
|
||||
# Prevent Lighttpd from enabling Let's Encrypt SSL for every blocked domain
|
||||
#include_shell "/usr/share/lighttpd/include-conf-enabled.pl"
|
||||
@@ -72,4 +81,5 @@ $HTTP["url"] =~ "^/admin/\.(.*)" {
|
||||
}
|
||||
|
||||
# Add user chosen options held in external file
|
||||
# This uses include_shell instead of an include wildcard for compatibility
|
||||
include_shell "cat external.conf 2>/dev/null"
|
||||
|
@@ -28,7 +28,7 @@ server.modules = (
|
||||
)
|
||||
|
||||
server.document-root = "/var/www/html"
|
||||
server.error-handler-404 = "pihole/index.php"
|
||||
server.error-handler-404 = "/pihole/index.php"
|
||||
server.upload-dirs = ( "/var/cache/lighttpd/uploads" )
|
||||
server.errorlog = "/var/log/lighttpd/error.log"
|
||||
server.pid-file = "/var/run/lighttpd.pid"
|
||||
@@ -90,4 +90,5 @@ $HTTP["url"] =~ "^/admin/\.(.*)" {
|
||||
}
|
||||
|
||||
# Add user chosen options held in external file
|
||||
# This uses include_shell instead of an include wildcard for compatibility
|
||||
include_shell "cat external.conf 2>/dev/null"
|
||||
|
@@ -1,89 +0,0 @@
|
||||
#!/bin/bash
|
||||
### BEGIN INIT INFO
|
||||
# Provides: pihole-FTL
|
||||
# Required-Start: $remote_fs $syslog
|
||||
# Required-Stop: $remote_fs $syslog
|
||||
# Default-Start: 2 3 4 5
|
||||
# Default-Stop: 0 1 6
|
||||
# Short-Description: pihole-FTL daemon
|
||||
# Description: Enable service provided by pihole-FTL daemon
|
||||
### END INIT INFO
|
||||
|
||||
FTLUSER=pihole
|
||||
PIDFILE=/var/run/pihole-FTL.pid
|
||||
|
||||
get_pid() {
|
||||
pidof "pihole-FTL"
|
||||
}
|
||||
|
||||
is_running() {
|
||||
ps "$(get_pid)" > /dev/null 2>&1
|
||||
}
|
||||
|
||||
|
||||
# Start the service
|
||||
start() {
|
||||
if is_running; then
|
||||
echo "pihole-FTL is already running"
|
||||
else
|
||||
touch /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port /var/log/pihole.log
|
||||
mkdir -p /var/run/pihole
|
||||
mkdir -p /var/log/pihole
|
||||
chown pihole:pihole /var/run/pihole /var/log/pihole
|
||||
rm /var/run/pihole/FTL.sock 2> /dev/null
|
||||
chown pihole:pihole /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port
|
||||
chown pihole:pihole /etc/pihole /etc/pihole/dhcp.leases /var/log/pihole.log
|
||||
chmod 0644 /var/log/pihole-FTL.log /run/pihole-FTL.pid /run/pihole-FTL.port /var/log/pihole.log
|
||||
setcap CAP_NET_BIND_SERVICE,CAP_NET_RAW,CAP_NET_ADMIN+eip "$(which pihole-FTL)"
|
||||
echo "nameserver 127.0.0.1" | /sbin/resolvconf -a lo.piholeFTL
|
||||
su -s /bin/sh -c "/usr/bin/pihole-FTL" "$FTLUSER"
|
||||
echo
|
||||
fi
|
||||
}
|
||||
|
||||
# Stop the service
|
||||
stop() {
|
||||
if is_running; then
|
||||
/sbin/resolvconf -d lo.piholeFTL
|
||||
kill "$(get_pid)"
|
||||
for i in {1..5}; do
|
||||
if ! is_running; then
|
||||
break
|
||||
fi
|
||||
|
||||
echo -n "."
|
||||
sleep 1
|
||||
done
|
||||
echo
|
||||
|
||||
if is_running; then
|
||||
echo "Not stopped; may still be shutting down or shutdown may have failed, killing now"
|
||||
kill -9 "$(get_pid)"
|
||||
exit 1
|
||||
else
|
||||
echo "Stopped"
|
||||
fi
|
||||
else
|
||||
echo "Not running"
|
||||
fi
|
||||
echo
|
||||
}
|
||||
|
||||
### main logic ###
|
||||
case "$1" in
|
||||
stop)
|
||||
stop
|
||||
;;
|
||||
status)
|
||||
status pihole-FTL
|
||||
;;
|
||||
start|restart|reload|condrestart)
|
||||
stop
|
||||
start
|
||||
;;
|
||||
*)
|
||||
echo $"Usage: $0 {start|stop|restart|reload|status}"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
exit 0
|
File diff suppressed because it is too large
@@ -11,29 +11,29 @@
|
||||
source "/opt/pihole/COL_TABLE"
|
||||
|
||||
while true; do
|
||||
read -rp " ${QST} Are you sure you would like to remove ${COL_WHITE}Pi-hole${COL_NC}? [y/N] " yn
|
||||
case ${yn} in
|
||||
[Yy]* ) break;;
|
||||
[Nn]* ) echo -e "\n ${COL_LIGHT_GREEN}Uninstall has been cancelled${COL_NC}"; exit 0;;
|
||||
* ) echo -e "\n ${COL_LIGHT_GREEN}Uninstall has been cancelled${COL_NC}"; exit 0;;
|
||||
esac
|
||||
read -rp " ${QST} Are you sure you would like to remove ${COL_WHITE}Pi-hole${COL_NC}? [y/N] " yn
|
||||
case ${yn} in
|
||||
[Yy]* ) break;;
|
||||
[Nn]* ) echo -e "${OVER} ${COL_LIGHT_GREEN}Uninstall has been cancelled${COL_NC}"; exit 0;;
|
||||
* ) echo -e "${OVER} ${COL_LIGHT_GREEN}Uninstall has been cancelled${COL_NC}"; exit 0;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Must be root to uninstall
|
||||
str="Root user check"
|
||||
if [[ ${EUID} -eq 0 ]]; then
|
||||
echo -e " ${TICK} ${str}"
|
||||
echo -e " ${TICK} ${str}"
|
||||
else
|
||||
# Check if sudo is actually installed
|
||||
# If it isn't, exit because the uninstall can not complete
|
||||
if [ -x "$(command -v sudo)" ]; then
|
||||
export SUDO="sudo"
|
||||
else
|
||||
echo -e " ${CROSS} ${str}
|
||||
Script called with non-root privileges
|
||||
The Pi-hole requires elevated privileges to uninstall"
|
||||
exit 1
|
||||
fi
|
||||
# Check if sudo is actually installed
|
||||
# If it isn't, exit because the uninstall can not complete
|
||||
if [ -x "$(command -v sudo)" ]; then
|
||||
export SUDO="sudo"
|
||||
else
|
||||
echo -e " ${CROSS} ${str}
|
||||
Script called with non-root privileges
|
||||
The Pi-hole requires elevated privileges to uninstall"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
readonly PI_HOLE_FILES_DIR="/etc/.pihole"
|
||||
@@ -46,178 +46,174 @@ source "${setupVars}"
|
||||
distro_check
|
||||
|
||||
# Install packages used by the Pi-hole
|
||||
if [[ "${INSTALL_WEB}" == true ]]; then
|
||||
# Install the Web dependencies
|
||||
DEPS=("${INSTALLER_DEPS[@]}" "${PIHOLE_DEPS[@]}" "${PIHOLE_WEB_DEPS[@]}")
|
||||
# Otherwise,
|
||||
else
|
||||
# just install the Core dependencies
|
||||
DEPS=("${INSTALLER_DEPS[@]}" "${PIHOLE_DEPS[@]}")
|
||||
DEPS=("${INSTALLER_DEPS[@]}" "${PIHOLE_DEPS[@]}")
|
||||
if [[ "${INSTALL_WEB_SERVER}" == true ]]; then
|
||||
# Install the Web dependencies
|
||||
DEPS+=("${PIHOLE_WEB_DEPS[@]}")
|
||||
fi
|
||||
|
||||
# Compatibility
|
||||
if [ -x "$(command -v rpm)" ]; then
|
||||
# Fedora Family
|
||||
PKG_REMOVE="${PKG_MANAGER} remove -y"
|
||||
package_check() {
|
||||
rpm -qa | grep ^$1- > /dev/null
|
||||
}
|
||||
package_cleanup() {
|
||||
${SUDO} ${PKG_MANAGER} -y autoremove
|
||||
}
|
||||
elif [ -x "$(command -v apt-get)" ]; then
|
||||
# Debian Family
|
||||
PKG_REMOVE="${PKG_MANAGER} -y remove --purge"
|
||||
package_check() {
|
||||
dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -c "ok installed"
|
||||
}
|
||||
package_cleanup() {
|
||||
${SUDO} ${PKG_MANAGER} -y autoremove
|
||||
${SUDO} ${PKG_MANAGER} -y autoclean
|
||||
}
|
||||
if [ -x "$(command -v apt-get)" ]; then
|
||||
# Debian Family
|
||||
PKG_REMOVE=("${PKG_MANAGER}" -y remove --purge)
|
||||
package_check() {
|
||||
dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -c "ok installed"
|
||||
}
|
||||
elif [ -x "$(command -v rpm)" ]; then
|
||||
# Fedora Family
|
||||
PKG_REMOVE=("${PKG_MANAGER}" remove -y)
|
||||
package_check() {
|
||||
rpm -qa | grep "^$1-" > /dev/null
|
||||
}
|
||||
else
|
||||
echo -e " ${CROSS} OS distribution not supported"
|
||||
exit 1
|
||||
echo -e " ${CROSS} OS distribution not supported"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
removeAndPurge() {
|
||||
# Purge dependencies
|
||||
echo ""
|
||||
for i in "${DEPS[@]}"; do
|
||||
package_check ${i} > /dev/null
|
||||
if [[ "$?" -eq 0 ]]; then
|
||||
while true; do
|
||||
read -rp " ${QST} Do you wish to remove ${COL_WHITE}${i}${COL_NC} from your system? [Y/N] " yn
|
||||
case ${yn} in
|
||||
[Yy]* )
|
||||
echo -ne " ${INFO} Removing ${i}...";
|
||||
${SUDO} ${PKG_REMOVE} "${i}" &> /dev/null;
|
||||
echo -e "${OVER} ${INFO} Removed ${i}";
|
||||
break;;
|
||||
[Nn]* ) echo -e " ${INFO} Skipped ${i}"; break;;
|
||||
esac
|
||||
done
|
||||
else
|
||||
echo -e " ${INFO} Package ${i} not installed"
|
||||
fi
|
||||
done
|
||||
# Purge dependencies
|
||||
echo ""
|
||||
for i in "${DEPS[@]}"; do
|
||||
if package_check "${i}" > /dev/null; then
|
||||
while true; do
|
||||
read -rp " ${QST} Do you wish to remove ${COL_WHITE}${i}${COL_NC} from your system? [Y/N] " yn
|
||||
case ${yn} in
|
||||
[Yy]* )
|
||||
echo -ne " ${INFO} Removing ${i}...";
|
||||
${SUDO} "${PKG_REMOVE[@]}" "${i}" &> /dev/null;
|
||||
echo -e "${OVER} ${INFO} Removed ${i}";
|
||||
break;;
|
||||
[Nn]* ) echo -e " ${INFO} Skipped ${i}"; break;;
|
||||
esac
|
||||
done
|
||||
else
|
||||
echo -e " ${INFO} Package ${i} not installed"
|
||||
fi
|
||||
done
|
||||
|
||||
# Remove dnsmasq config files
|
||||
${SUDO} rm -f /etc/dnsmasq.conf /etc/dnsmasq.conf.orig /etc/dnsmasq.d/01-pihole.conf &> /dev/null
|
||||
echo -e " ${TICK} Removing dnsmasq config files"
|
||||
# Remove dnsmasq config files
|
||||
${SUDO} rm -f /etc/dnsmasq.conf /etc/dnsmasq.conf.orig /etc/dnsmasq.d/*-pihole*.conf &> /dev/null
|
||||
echo -e " ${TICK} Removing dnsmasq config files"
|
||||
|
||||
# Take care of any additional package cleaning
|
||||
echo -ne " ${INFO} Removing & cleaning remaining dependencies..."
|
||||
package_cleanup &> /dev/null
|
||||
echo -e "${OVER} ${TICK} Removed & cleaned up remaining dependencies"
|
||||
|
||||
# Call removeNoPurge to remove Pi-hole specific files
|
||||
removeNoPurge
|
||||
# Call removeNoPurge to remove Pi-hole specific files
|
||||
removeNoPurge
|
||||
}
|
||||
|
||||
removeNoPurge() {
|
||||
# Only web directories/files that are created by Pi-hole should be removed
|
||||
echo -ne " ${INFO} Removing Web Interface..."
|
||||
${SUDO} rm -rf /var/www/html/admin &> /dev/null
|
||||
${SUDO} rm -rf /var/www/html/pihole &> /dev/null
|
||||
${SUDO} rm -f /var/www/html/index.lighttpd.orig &> /dev/null
|
||||
# Only web directories/files that are created by Pi-hole should be removed
|
||||
echo -ne " ${INFO} Removing Web Interface..."
|
||||
${SUDO} rm -rf /var/www/html/admin &> /dev/null
|
||||
${SUDO} rm -rf /var/www/html/pihole &> /dev/null
|
||||
${SUDO} rm -f /var/www/html/index.lighttpd.orig &> /dev/null
|
||||
|
||||
# If the web directory is empty after removing these files, then the parent html folder can be removed.
|
||||
if [ -d "/var/www/html" ]; then
|
||||
if [[ ! "$(ls -A /var/www/html)" ]]; then
|
||||
${SUDO} rm -rf /var/www/html &> /dev/null
|
||||
fi
|
||||
fi
|
||||
echo -e "${OVER} ${TICK} Removed Web Interface"
|
||||
|
||||
# Attempt to preserve backwards compatibility with older versions
|
||||
# to guarantee no additional changes were made to /etc/crontab after
|
||||
# the installation of pihole, /etc/crontab.pihole should be permanently
|
||||
# preserved.
|
||||
if [[ -f /etc/crontab.orig ]]; then
|
||||
${SUDO} mv /etc/crontab /etc/crontab.pihole
|
||||
${SUDO} mv /etc/crontab.orig /etc/crontab
|
||||
${SUDO} service cron restart
|
||||
echo -e " ${TICK} Restored the default system cron"
|
||||
fi
|
||||
|
||||
# Attempt to preserve backwards compatibility with older versions
|
||||
if [[ -f /etc/cron.d/pihole ]];then
|
||||
${SUDO} rm -f /etc/cron.d/pihole &> /dev/null
|
||||
echo -e " ${TICK} Removed /etc/cron.d/pihole"
|
||||
fi
|
||||
|
||||
package_check lighttpd > /dev/null
|
||||
if [[ $? -eq 1 ]]; then
|
||||
${SUDO} rm -rf /etc/lighttpd/ &> /dev/null
|
||||
echo -e " ${TICK} Removed lighttpd"
|
||||
else
|
||||
if [ -f /etc/lighttpd/lighttpd.conf.orig ]; then
|
||||
${SUDO} mv /etc/lighttpd/lighttpd.conf.orig /etc/lighttpd/lighttpd.conf
|
||||
fi
|
||||
fi
|
||||
|
||||
${SUDO} rm -f /etc/dnsmasq.d/adList.conf &> /dev/null
|
||||
${SUDO} rm -f /etc/dnsmasq.d/01-pihole.conf &> /dev/null
|
||||
${SUDO} rm -rf /var/log/*pihole* &> /dev/null
|
||||
${SUDO} rm -rf /etc/pihole/ &> /dev/null
|
||||
${SUDO} rm -rf /etc/.pihole/ &> /dev/null
|
||||
${SUDO} rm -rf /opt/pihole/ &> /dev/null
|
||||
${SUDO} rm -f /usr/local/bin/pihole &> /dev/null
|
||||
${SUDO} rm -f /etc/bash_completion.d/pihole &> /dev/null
|
||||
${SUDO} rm -f /etc/sudoers.d/pihole &> /dev/null
|
||||
echo -e " ${TICK} Removed config files"
|
||||
|
||||
# Remove FTL
|
||||
if command -v pihole-FTL &> /dev/null; then
|
||||
echo -ne " ${INFO} Removing pihole-FTL..."
|
||||
|
||||
if [[ -x "$(command -v systemctl)" ]]; then
|
||||
systemctl stop pihole-FTL
|
||||
else
|
||||
service pihole-FTL stop
|
||||
# If the web directory is empty after removing these files, then the parent html directory can be removed.
|
||||
if [ -d "/var/www/html" ]; then
|
||||
if [[ ! "$(ls -A /var/www/html)" ]]; then
|
||||
${SUDO} rm -rf /var/www/html &> /dev/null
|
||||
fi
|
||||
fi
|
||||
echo -e "${OVER} ${TICK} Removed Web Interface"
|
||||
|
||||
# Attempt to preserve backwards compatibility with older versions
|
||||
# to guarantee no additional changes were made to /etc/crontab after
|
||||
# the installation of pihole, /etc/crontab.pihole should be permanently
|
||||
# preserved.
|
||||
if [[ -f /etc/crontab.orig ]]; then
|
||||
${SUDO} mv /etc/crontab /etc/crontab.pihole
|
||||
${SUDO} mv /etc/crontab.orig /etc/crontab
|
||||
${SUDO} service cron restart
|
||||
echo -e " ${TICK} Restored the default system cron"
|
||||
fi
|
||||
|
||||
${SUDO} rm -f /etc/init.d/pihole-FTL
|
||||
${SUDO} rm -f /usr/bin/pihole-FTL
|
||||
echo -e "${OVER} ${TICK} Removed pihole-FTL"
|
||||
fi
|
||||
|
||||
# If the pihole user exists, then remove
|
||||
if id "pihole" &> /dev/null; then
|
||||
${SUDO} userdel -r pihole 2> /dev/null
|
||||
if [[ "$?" -eq 0 ]]; then
|
||||
echo -e " ${TICK} Removed 'pihole' user"
|
||||
else
|
||||
echo -e " ${CROSS} Unable to remove 'pihole' user"
|
||||
# Attempt to preserve backwards compatibility with older versions
|
||||
if [[ -f /etc/cron.d/pihole ]];then
|
||||
${SUDO} rm -f /etc/cron.d/pihole &> /dev/null
|
||||
echo -e " ${TICK} Removed /etc/cron.d/pihole"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo -e "\n We're sorry to see you go, but thanks for checking out Pi-hole!
|
||||
If you need help, reach out to us on Github, Discourse, Reddit or Twitter
|
||||
Reinstall at any time: ${COL_WHITE}curl -sSL https://install.pi-hole.net | bash${COL_NC}
|
||||
if package_check lighttpd > /dev/null; then
|
||||
if [[ -f /etc/lighttpd/lighttpd.conf.orig ]]; then
|
||||
${SUDO} mv /etc/lighttpd/lighttpd.conf.orig /etc/lighttpd/lighttpd.conf
|
||||
fi
|
||||
|
||||
${COL_LIGHT_RED}Please reset the DNS on your router/clients to restore internet connectivity
|
||||
${COL_LIGHT_GREEN}Uninstallation Complete! ${COL_NC}"
|
||||
if [[ -f /etc/lighttpd/external.conf ]]; then
|
||||
${SUDO} rm /etc/lighttpd/external.conf
|
||||
fi
|
||||
|
||||
echo -e " ${TICK} Removed lighttpd configs"
|
||||
fi
|
||||
|
||||
${SUDO} rm -f /etc/dnsmasq.d/adList.conf &> /dev/null
|
||||
${SUDO} rm -f /etc/dnsmasq.d/01-pihole.conf &> /dev/null
|
||||
${SUDO} rm -rf /var/log/*pihole* &> /dev/null
|
||||
${SUDO} rm -rf /etc/pihole/ &> /dev/null
|
||||
${SUDO} rm -rf /etc/.pihole/ &> /dev/null
|
||||
${SUDO} rm -rf /opt/pihole/ &> /dev/null
|
||||
${SUDO} rm -f /usr/local/bin/pihole &> /dev/null
|
||||
${SUDO} rm -f /etc/bash_completion.d/pihole &> /dev/null
|
||||
${SUDO} rm -f /etc/sudoers.d/pihole &> /dev/null
|
||||
echo -e " ${TICK} Removed config files"
|
||||
|
||||
# Restore Resolved
|
||||
if [[ -e /etc/systemd/resolved.conf.orig ]]; then
|
||||
${SUDO} cp -p /etc/systemd/resolved.conf.orig /etc/systemd/resolved.conf
|
||||
systemctl reload-or-restart systemd-resolved
|
||||
fi
|
||||
|
||||
# Remove FTL
|
||||
if command -v pihole-FTL &> /dev/null; then
|
||||
echo -ne " ${INFO} Removing pihole-FTL..."
|
||||
if [[ -x "$(command -v systemctl)" ]]; then
|
||||
systemctl stop pihole-FTL
|
||||
else
|
||||
service pihole-FTL stop
|
||||
fi
|
||||
${SUDO} rm -f /etc/init.d/pihole-FTL
|
||||
${SUDO} rm -f /usr/bin/pihole-FTL
|
||||
echo -e "${OVER} ${TICK} Removed pihole-FTL"
|
||||
fi
|
||||
|
||||
# If the pihole manpage exists, then delete and rebuild man-db
|
||||
if [[ -f /usr/local/share/man/man8/pihole.8 ]]; then
|
||||
${SUDO} rm -f /usr/local/share/man/man8/pihole.8 /usr/local/share/man/man8/pihole-FTL.8 /usr/local/share/man/man5/pihole-FTL.conf.5
|
||||
${SUDO} mandb -q &>/dev/null
|
||||
echo -e " ${TICK} Removed pihole man page"
|
||||
fi
|
||||
|
||||
# If the pihole user exists, then remove
|
||||
if id "pihole" &> /dev/null; then
|
||||
if ${SUDO} userdel -r pihole 2> /dev/null; then
|
||||
echo -e " ${TICK} Removed 'pihole' user"
|
||||
else
|
||||
echo -e " ${CROSS} Unable to remove 'pihole' user"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo -e "\\n We're sorry to see you go, but thanks for checking out Pi-hole!
|
||||
If you need help, reach out to us on Github, Discourse, Reddit or Twitter
|
||||
Reinstall at any time: ${COL_WHITE}curl -sSL https://install.pi-hole.net | bash${COL_NC}
|
||||
|
||||
${COL_LIGHT_RED}Please reset the DNS on your router/clients to restore internet connectivity
|
||||
${COL_LIGHT_GREEN}Uninstallation Complete! ${COL_NC}"
|
||||
}
|
||||
|
||||
######### SCRIPT ###########
|
||||
if command -v vcgencmd &> /dev/null; then
|
||||
echo -e " ${INFO} All dependencies are safe to remove on Raspbian"
|
||||
echo -e " ${INFO} All dependencies are safe to remove on Raspbian"
|
||||
else
|
||||
echo -e " ${INFO} Be sure to confirm if any dependencies should not be removed"
|
||||
fi
|
||||
while true; do
|
||||
echo -e " ${INFO} ${COL_YELLOW}The following dependencies may have been added by the Pi-hole install:"
|
||||
echo -n " "
|
||||
for i in "${DEPS[@]}"; do
|
||||
echo -n "${i} "
|
||||
done
|
||||
echo "${COL_NC}"
|
||||
read -rp " ${QST} Do you wish to go through each dependency for removal? (Choosing No will leave all dependencies installed) [Y/n] " yn
|
||||
case ${yn} in
|
||||
[Yy]* ) removeAndPurge; break;;
|
||||
[Nn]* ) removeNoPurge; break;;
|
||||
* ) removeAndPurge; break;;
|
||||
esac
|
||||
done
|
||||
|
741
gravity.sh
@@ -15,34 +15,33 @@ export LC_ALL=C
|
||||
|
||||
coltable="/opt/pihole/COL_TABLE"
|
||||
source "${coltable}"
|
||||
regexconverter="/opt/pihole/wildcard_regex_converter.sh"
|
||||
source "${regexconverter}"
|
||||
# shellcheck disable=SC1091
|
||||
source "/etc/.pihole/advanced/Scripts/database_migration/gravity-db.sh"
|
||||
|
||||
basename="pihole"
|
||||
PIHOLE_COMMAND="/usr/local/bin/${basename}"
|
||||
|
||||
piholeDir="/etc/${basename}"
|
||||
piholeRepo="/etc/.${basename}"
|
||||
|
||||
adListFile="${piholeDir}/adlists.list"
|
||||
adListDefault="${piholeDir}/adlists.default"
|
||||
adListRepoDefault="${piholeRepo}/adlists.default"
|
||||
|
||||
# Legacy (pre v5.0) list file locations
|
||||
whitelistFile="${piholeDir}/whitelist.txt"
|
||||
blacklistFile="${piholeDir}/blacklist.txt"
|
||||
wildcardFile="/etc/dnsmasq.d/03-pihole-wildcard.conf"
|
||||
regexFile="${piholeDir}/regex.list"
|
||||
adListFile="${piholeDir}/adlists.list"
|
||||
|
||||
adList="${piholeDir}/gravity.list"
|
||||
blackList="${piholeDir}/black.list"
|
||||
localList="${piholeDir}/local.list"
|
||||
VPNList="/etc/openvpn/ipp.txt"
|
||||
|
||||
domainsExtension="domains"
|
||||
matterAndLight="${basename}.0.matterandlight.txt"
|
||||
parsedMatter="${basename}.1.parsedmatter.txt"
|
||||
whitelistMatter="${basename}.2.whitelistmatter.txt"
|
||||
accretionDisc="${basename}.3.accretionDisc.txt"
|
||||
preEventHorizon="list.preEventHorizon"
|
||||
piholeGitDir="/etc/.pihole"
|
||||
gravityDBfile="${piholeDir}/gravity.db"
|
||||
gravityTEMPfile="${piholeDir}/gravity_temp.db"
|
||||
gravityDBschema="${piholeGitDir}/advanced/Templates/gravity.db.sql"
|
||||
gravityDBcopy="${piholeGitDir}/advanced/Templates/gravity_copy.sql"
|
||||
optimize_database=false
|
||||
|
||||
skipDownload="false"
|
||||
domainsExtension="domains"
|
||||
|
||||
resolver="pihole-FTL"
|
||||
|
||||
@@ -66,22 +65,201 @@ else
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Source pihole-FTL from install script
|
||||
pihole_FTL="${piholeDir}/pihole-FTL.conf"
|
||||
if [[ -f "${pihole_FTL}" ]]; then
|
||||
source "${pihole_FTL}"
|
||||
fi
|
||||
|
||||
if [[ -z "${BLOCKINGMODE}" ]] ; then
|
||||
BLOCKINGMODE="NULL"
|
||||
fi
|
||||
|
||||
# Determine if superseded pihole.conf exists
|
||||
if [[ -r "${piholeDir}/pihole.conf" ]]; then
|
||||
echo -e " ${COL_LIGHT_RED}Ignoring overrides specified within pihole.conf! ${COL_NC}"
|
||||
fi
|
||||
|
||||
# Generate new sqlite3 file from schema template
|
||||
generate_gravity_database() {
|
||||
sqlite3 "${1}" < "${gravityDBschema}"
|
||||
}
|
||||
|
||||
# Copy data from old to new database file and swap them
|
||||
gravity_swap_databases() {
|
||||
local str
|
||||
str="Building tree"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
|
||||
# The index is intentionally not UNIQUE as poor quality adlists may contain domains more than once
|
||||
output=$( { sqlite3 "${gravityTEMPfile}" "CREATE INDEX idx_gravity ON gravity (domain, adlist_id);"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to build gravity tree in ${gravityTEMPfile}\\n ${output}"
|
||||
return 1
|
||||
fi
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
|
||||
str="Swapping databases"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
|
||||
output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBcopy}"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to copy data from ${gravityDBfile} to ${gravityTEMPfile}\\n ${output}"
|
||||
return 1
|
||||
fi
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
|
||||
# Swap databases and remove old database
|
||||
rm "${gravityDBfile}"
|
||||
mv "${gravityTEMPfile}" "${gravityDBfile}"
|
||||
}
|
||||
|
||||
# Update timestamp when the gravity table was last updated successfully
|
||||
update_gravity_timestamp() {
|
||||
output=$( { printf ".timeout 30000\\nINSERT OR REPLACE INTO info (property,value) values ('updated',cast(strftime('%%s', 'now') as int));" | sqlite3 "${gravityDBfile}"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to update gravity timestamp in database ${gravityDBfile}\\n ${output}"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
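# Editor's note (illustration, not part of the original script): the 'updated'
# property written above can be read back to verify a run, e.g.
#   sqlite3 /etc/pihole/gravity.db "SELECT value FROM info WHERE property = 'updated';"
# which prints the Unix timestamp of the last successful gravity update.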
|
||||
|
||||
# Import domains from file and store them in the specified database table
|
||||
database_table_from_file() {
|
||||
# Define locals
|
||||
local table source backup_path backup_file tmpFile type
|
||||
table="${1}"
|
||||
source="${2}"
|
||||
backup_path="${piholeDir}/migration_backup"
|
||||
backup_file="${backup_path}/$(basename "${2}")"
|
||||
tmpFile="$(mktemp -p "/tmp" --suffix=".gravity")"
|
||||
|
||||
local timestamp
|
||||
timestamp="$(date --utc +'%s')"
|
||||
|
||||
local rowid
|
||||
declare -i rowid
|
||||
rowid=1
|
||||
|
||||
# Special handling for domains to be imported into the common domainlist table
|
||||
if [[ "${table}" == "whitelist" ]]; then
|
||||
type="0"
|
||||
table="domainlist"
|
||||
elif [[ "${table}" == "blacklist" ]]; then
|
||||
type="1"
|
||||
table="domainlist"
|
||||
elif [[ "${table}" == "regex" ]]; then
|
||||
type="3"
|
||||
table="domainlist"
|
||||
fi
|
||||
|
||||
# Get MAX(id) from domainlist when INSERTing into this table
|
||||
if [[ "${table}" == "domainlist" ]]; then
|
||||
rowid="$(sqlite3 "${gravityDBfile}" "SELECT MAX(id) FROM domainlist;")"
|
||||
if [[ -z "$rowid" ]]; then
|
||||
rowid=0
|
||||
fi
|
||||
rowid+=1
|
||||
fi
|
||||
|
||||
# Loop over all domains in ${source} file
|
||||
# Read file line by line
|
||||
grep -v '^ *#' < "${source}" | while IFS= read -r domain
|
||||
do
|
||||
# Only add non-empty lines
|
||||
if [[ -n "${domain}" ]]; then
|
||||
if [[ "${table}" == "domain_audit" ]]; then
|
||||
# domain_audit table format (no enable or modified fields)
|
||||
echo "${rowid},\"${domain}\",${timestamp}" >> "${tmpFile}"
|
||||
elif [[ "${table}" == "adlist" ]]; then
|
||||
# Adlist table format
|
||||
echo "${rowid},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
|
||||
else
|
||||
# White-, black-, and regexlist table format
|
||||
echo "${rowid},${type},\"${domain}\",1,${timestamp},${timestamp},\"Migrated from ${source}\"" >> "${tmpFile}"
|
||||
fi
|
||||
rowid+=1
|
||||
fi
|
||||
done
|
||||
|
||||
# Store domains in database table specified by ${table}
|
||||
# Use printf as .mode and .import need to be on separate lines
|
||||
# see https://unix.stackexchange.com/a/445615/83260
|
||||
output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" %s\\n" "${tmpFile}" "${table}" | sqlite3 "${gravityDBfile}"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to fill table ${table}${type} in database ${gravityDBfile}\\n ${output}"
|
||||
gravity_Cleanup "error"
|
||||
fi
|
||||
|
||||
# Move source file to backup directory, create directory if not existing
|
||||
mkdir -p "${backup_path}"
|
||||
mv "${source}" "${backup_file}" 2> /dev/null || \
|
||||
echo -e " ${CROSS} Unable to backup ${source} to ${backup_path}"
|
||||
|
||||
# Delete tmpFile
|
||||
rm "${tmpFile}" > /dev/null 2>&1 || \
|
||||
echo -e " ${CROSS} Unable to remove ${tmpFile}"
|
||||
}
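# Editor's note (illustration with assumed values): a migrated whitelist entry
# becomes one CSV row per domain, e.g.
#   4,0,"example.com",1,1587000000,1587000000,"Migrated from /etc/pihole/whitelist.txt"
# i.e. id, type (0 = whitelist), domain, enabled flag, two timestamps and a comment,
# which sqlite3 then bulk-loads via ".mode csv" and ".import" as shown above.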
|
||||
|
||||
# Migrate pre-v5.0 list files to database-based Pi-hole versions
|
||||
migrate_to_database() {
|
||||
# Create database file only if not present
|
||||
if [ ! -e "${gravityDBfile}" ]; then
|
||||
# Create new database file - note that this will be created in version 1
|
||||
echo -e " ${INFO} Creating new gravity database"
|
||||
generate_gravity_database "${gravityDBfile}"
|
||||
|
||||
# Check if gravity database needs to be updated
|
||||
upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
|
||||
|
||||
# Migrate list files to new database
|
||||
if [ -e "${adListFile}" ]; then
|
||||
# Store adlist domains in database
|
||||
echo -e " ${INFO} Migrating content of ${adListFile} into new database"
|
||||
database_table_from_file "adlist" "${adListFile}"
|
||||
fi
|
||||
if [ -e "${blacklistFile}" ]; then
|
||||
# Store blacklisted domains in database
|
||||
echo -e " ${INFO} Migrating content of ${blacklistFile} into new database"
|
||||
database_table_from_file "blacklist" "${blacklistFile}"
|
||||
fi
|
||||
if [ -e "${whitelistFile}" ]; then
|
||||
# Store whitelisted domains in database
|
||||
echo -e " ${INFO} Migrating content of ${whitelistFile} into new database"
|
||||
database_table_from_file "whitelist" "${whitelistFile}"
|
||||
fi
|
||||
if [ -e "${regexFile}" ]; then
|
||||
# Store regex domains in database
|
||||
# Important note: We need to add the domains to the "regex" table
|
||||
# as it will only later be renamed to "regex_blacklist"!
|
||||
echo -e " ${INFO} Migrating content of ${regexFile} into new database"
|
||||
database_table_from_file "regex" "${regexFile}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if gravity database needs to be updated
|
||||
upgrade_gravityDB "${gravityDBfile}" "${piholeDir}"
|
||||
}
|
||||
|
||||
# Determine if DNS resolution is available before proceeding
|
||||
gravity_CheckDNSResolutionAvailable() {
|
||||
local lookupDomain="pi.hole"
|
||||
|
||||
# Determine if $localList does not exist
|
||||
if [[ ! -e "${localList}" ]]; then
|
||||
# Determine if $localList does not exist, and ensure it is not empty
|
||||
if [[ ! -e "${localList}" ]] || [[ -s "${localList}" ]]; then
|
||||
lookupDomain="raw.githubusercontent.com"
|
||||
fi
|
||||
|
||||
# Determine if $lookupDomain is resolvable
|
||||
if timeout 1 getent hosts "${lookupDomain}" &> /dev/null; then
|
||||
if timeout 4 getent hosts "${lookupDomain}" &> /dev/null; then
|
||||
# Print confirmation of resolvability if it had previously failed
|
||||
if [[ -n "${secs:-}" ]]; then
|
||||
echo -e "${OVER} ${TICK} DNS resolution is now available\\n"
|
||||
@@ -95,7 +273,7 @@ gravity_CheckDNSResolutionAvailable() {
|
||||
# If the /etc/resolv.conf contains resolvers other than 127.0.0.1 then the local dnsmasq will not be queried and pi.hole is NXDOMAIN.
|
||||
# This means that even though name resolution is working, the getent hosts check fails and the holddown timer keeps ticking and eventually fails
|
||||
# So we check the output of the last command and if it failed, attempt to use dig +short as a fallback
|
||||
if timeout 1 dig +short "${lookupDomain}" &> /dev/null; then
|
||||
if timeout 4 dig +short "${lookupDomain}" &> /dev/null; then
|
||||
if [[ -n "${secs:-}" ]]; then
|
||||
echo -e "${OVER} ${TICK} DNS resolution is now available\\n"
|
||||
fi
|
||||
@@ -127,27 +305,14 @@ gravity_CheckDNSResolutionAvailable() {
|
||||
gravity_CheckDNSResolutionAvailable
|
||||
}
|
||||
|
||||
# Retrieve blocklist URLs and parse domains from adlists.list
|
||||
gravity_GetBlocklistUrls() {
|
||||
# Retrieve blocklist URLs and parse domains from adlist.list
|
||||
gravity_DownloadBlocklists() {
|
||||
echo -e " ${INFO} ${COL_BOLD}Neutrino emissions detected${COL_NC}..."
|
||||
|
||||
# Determine if adlists file needs handling
|
||||
if [[ ! -f "${adListFile}" ]]; then
|
||||
# Create "adlists.list" by copying "adlists.default" from internal core repo
|
||||
cp "${adListRepoDefault}" "${adListFile}" 2> /dev/null || \
|
||||
echo -e " ${CROSS} Unable to copy ${adListFile##*/} from ${piholeRepo}"
|
||||
elif [[ -f "${adListDefault}" ]] && [[ -f "${adListFile}" ]]; then
|
||||
# Remove superseded $adListDefault file
|
||||
rm "${adListDefault}" 2> /dev/null || \
|
||||
echo -e " ${CROSS} Unable to remove ${adListDefault}"
|
||||
fi
|
||||
|
||||
local str="Pulling blocklist source list into range"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
|
||||
# Retrieve source URLs from $adListFile
|
||||
# Logic: Remove comments and empty lines
|
||||
mapfile -t sources <<< "$(grep -v -E "^(#|$)" "${adListFile}" 2> /dev/null)"
|
||||
# Retrieve source URLs from gravity database
|
||||
# We source only enabled adlists, sqlite3 stores boolean values as 0 (false) or 1 (true)
|
||||
mapfile -t sources <<< "$(sqlite3 "${gravityDBfile}" "SELECT address FROM vw_adlist;" 2> /dev/null)"
|
||||
mapfile -t sourceIDs <<< "$(sqlite3 "${gravityDBfile}" "SELECT id FROM vw_adlist;" 2> /dev/null)"
|
||||
|
||||
# Parse source domains from $sources
|
||||
mapfile -t sourceDomains <<< "$(
|
||||
@@ -160,20 +325,36 @@ gravity_GetBlocklistUrls() {
|
||||
}' <<< "$(printf '%s\n' "${sources[@]}")" 2> /dev/null
|
||||
)"
|
||||
|
||||
local str="Pulling blocklist source list into range"
|
||||
|
||||
if [[ -n "${sources[*]}" ]] && [[ -n "${sourceDomains[*]}" ]]; then
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
else
|
||||
echo -e "${OVER} ${CROSS} ${str}"
|
||||
gravity_Cleanup "error"
|
||||
echo -e " ${INFO} No source list found, or it is empty"
|
||||
echo ""
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Define options for when retrieving blocklists
|
||||
gravity_SetDownloadOptions() {
|
||||
local url domain agent cmd_ext str
|
||||
|
||||
local url domain agent cmd_ext str target
|
||||
echo ""
|
||||
|
||||
# Prepare new gravity database
|
||||
str="Preparing new gravity database"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
rm "${gravityTEMPfile}" > /dev/null 2>&1
|
||||
output=$( { sqlite3 "${gravityTEMPfile}" < "${gravityDBschema}"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to create new database ${gravityTEMPfile}\\n ${output}"
|
||||
gravity_Cleanup "error"
|
||||
else
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
|
||||
target="$(mktemp -p "/tmp" --suffix=".gravity")"
|
||||
|
||||
# Loop through $sources and download each one
|
||||
for ((i = 0; i < "${#sources[@]}"; i++)); do
|
||||
url="${sources[$i]}"
|
||||
@@ -184,7 +365,7 @@ gravity_SetDownloadOptions() {
|
||||
activeDomains[$i]="${saveLocation}"
|
||||
|
||||
# Default user-agent (for Cloudflare's Browser Integrity Check: https://support.cloudflare.com/hc/en-us/articles/200170086-What-does-the-Browser-Integrity-Check-do-)
|
||||
agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36"
|
||||
agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36"
|
||||
|
||||
# Provide special commands for blocklists which may need them
|
||||
case "${domain}" in
|
||||
@@ -192,18 +373,90 @@ gravity_SetDownloadOptions() {
|
||||
*) cmd_ext="";;
|
||||
esac
|
||||
|
||||
if [[ "${skipDownload}" == false ]]; then
|
||||
echo -e " ${INFO} Target: ${domain} (${url##*/})"
|
||||
gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}"
|
||||
echo ""
|
||||
echo -e " ${INFO} Target: ${url}"
|
||||
local regex
|
||||
# Check for characters NOT allowed in URLs
|
||||
regex="[^a-zA-Z0-9:/?&%=~._()-]"
|
||||
if [[ "${url}" =~ ${regex} ]]; then
|
||||
echo -e " ${CROSS} Invalid Target"
|
||||
else
|
||||
gravity_DownloadBlocklistFromUrl "${url}" "${cmd_ext}" "${agent}" "${sourceIDs[$i]}" "${saveLocation}" "${target}"
|
||||
fi
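# Editor's note (illustrative examples): the character whitelist above accepts a
# typical list URL such as "https://example.com/hosts.txt" but rejects anything
# containing spaces, quotes or ";" (e.g. "https://example.com/a.txt; rm -rf /"),
# since those characters are not in [a-zA-Z0-9:/?&%=~._()-].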
|
||||
echo ""
|
||||
done
|
||||
|
||||
str="Storing downloaded domains in new gravity database"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
output=$( { printf ".timeout 30000\\n.mode csv\\n.import \"%s\" gravity\\n" "${target}" | sqlite3 "${gravityTEMPfile}"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to fill gravity table in database ${gravityTEMPfile}\\n ${output}"
|
||||
gravity_Cleanup "error"
|
||||
else
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
|
||||
if [[ "${status}" -eq 0 && -n "${output}" ]]; then
|
||||
echo -e " Encountered non-critical SQL warnings. Please check the suitability of the lists you're using!\\n\\n SQL warnings:"
|
||||
local warning file line lineno
|
||||
while IFS= read -r line; do
|
||||
echo " - ${line}"
|
||||
warning="$(grep -oh "^[^:]*:[0-9]*" <<< "${line}")"
|
||||
file="${warning%:*}"
|
||||
lineno="${warning#*:}"
|
||||
if [[ -n "${file}" && -n "${lineno}" ]]; then
|
||||
echo -n " Line contains: "
|
||||
awk "NR==${lineno}" < "${file}"
|
||||
fi
|
||||
done <<< "${output}"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
rm "${target}" > /dev/null 2>&1 || \
|
||||
echo -e " ${CROSS} Unable to remove ${target}"
|
||||
|
||||
gravity_Blackbody=true
|
||||
}
|
||||
|
||||
total_num=0
|
||||
parseList() {
|
||||
local adlistID="${1}" src="${2}" target="${3}" incorrect_lines
|
||||
# This sed does the following things:
|
||||
# 1. Remove all domains containing invalid characters. Valid are: a-z, A-Z, 0-9, dot (.), minus (-), underscore (_)
|
||||
# 2. Append ,adlistID to every line
|
||||
# 3. Ensures there is a newline on the last line
|
||||
sed -e "/[^a-zA-Z0-9.\_-]/d;s/$/,${adlistID}/;/.$/a\\" "${src}" >> "${target}"
|
||||
# Find (up to) five domains containing invalid characters (see above)
|
||||
incorrect_lines="$(sed -e "/[^a-zA-Z0-9.\_-]/!d" "${src}" | head -n 5)"
|
||||
|
||||
local num_lines num_target_lines num_correct_lines num_invalid
|
||||
# Get number of lines in source file
|
||||
num_lines="$(grep -c "^" "${src}")"
|
||||
# Get number of lines in destination file
|
||||
num_target_lines="$(grep -c "^" "${target}")"
|
||||
num_correct_lines="$(( num_target_lines-total_num ))"
|
||||
total_num="$num_target_lines"
|
||||
num_invalid="$(( num_lines-num_correct_lines ))"
|
||||
if [[ "${num_invalid}" -eq 0 ]]; then
|
||||
echo " ${INFO} Received ${num_lines} domains"
|
||||
else
|
||||
echo " ${INFO} Received ${num_lines} domains, ${num_invalid} domains invalid!"
|
||||
fi
|
||||
|
||||
# Display sample of invalid lines if we found some
|
||||
if [[ -n "${incorrect_lines}" ]]; then
|
||||
echo " Sample of invalid domains:"
|
||||
while IFS= read -r line; do
|
||||
echo " - ${line}"
|
||||
done <<< "${incorrect_lines}"
|
||||
fi
|
||||
}
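# Editor's note (illustration with assumed input): for adlistID=5, a source file
# containing "example.com" and "bad$domain.com" yields only
#   example.com,5
# in ${target}; the second line is dropped by the first sed expression because
# "$" is not a valid domain character, and it is then reported as invalid.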
|
||||
|
||||
# Download specified URL and perform checks on HTTP status and file content
|
||||
gravity_DownloadBlocklistFromUrl() {
|
||||
local url="${1}" cmd_ext="${2}" agent="${3}" heisenbergCompensator="" patternBuffer str httpCode success=""
|
||||
local url="${1}" cmd_ext="${2}" agent="${3}" adlistID="${4}" saveLocation="${5}" target="${6}"
|
||||
local heisenbergCompensator="" patternBuffer str httpCode success=""
|
||||
|
||||
# Create temp file to store content on disk instead of RAM
|
||||
patternBuffer=$(mktemp -p "/tmp" --suffix=".phgpb")
|
||||
@@ -218,12 +471,52 @@ gravity_DownloadBlocklistFromUrl() {
|
||||
|
||||
str="Status:"
|
||||
echo -ne " ${INFO} ${str} Pending..."
|
||||
blocked=false
|
||||
case $BLOCKINGMODE in
|
||||
"IP-NODATA-AAAA"|"IP")
|
||||
if [[ $(dig "${domain}" +short | grep "${IPV4_ADDRESS}" -c) -ge 1 ]]; then
|
||||
blocked=true
|
||||
fi;;
|
||||
"NXDOMAIN")
|
||||
if [[ $(dig "${domain}" | grep "NXDOMAIN" -c) -ge 1 ]]; then
|
||||
blocked=true
|
||||
fi;;
|
||||
"NULL"|*)
|
||||
if [[ $(dig "${domain}" +short | grep "0.0.0.0" -c) -ge 1 ]]; then
|
||||
blocked=true
|
||||
fi;;
|
||||
esac
|
||||
|
||||
if [[ "${blocked}" == true ]]; then
|
||||
printf -v ip_addr "%s" "${PIHOLE_DNS_1%#*}"
|
||||
if [[ ${PIHOLE_DNS_1} != *"#"* ]]; then
|
||||
port=53
|
||||
else
|
||||
printf -v port "%s" "${PIHOLE_DNS_1#*#}"
|
||||
fi
|
||||
ip=$(dig "@${ip_addr}" -p "${port}" +short "${domain}" | tail -1)
|
||||
if [[ $(echo "${url}" | awk -F '://' '{print $1}') = "https" ]]; then
|
||||
port=443;
|
||||
else port=80
|
||||
fi
|
||||
bad_list=$(pihole -q -adlist "${domain}" | head -n1 | awk -F 'Match found in ' '{print $2}')
|
||||
echo -e "${OVER} ${CROSS} ${str} ${domain} is blocked by ${bad_list%:}. Using DNS on ${PIHOLE_DNS_1} to download ${url}";
|
||||
echo -ne " ${INFO} ${str} Pending..."
|
||||
cmd_ext="--resolve $domain:$port:$ip $cmd_ext"
|
||||
fi
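# Editor's note (illustration with assumed values): curl's --resolve option pins the
# blocklist host to the address obtained from the upstream DNS server, e.g.
#   curl --resolve example-adlist.com:443:93.184.216.34 -s -L "https://example-adlist.com/hosts.txt"
# so the download bypasses Pi-hole's own (blocking) answer for that host.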
|
||||
# shellcheck disable=SC2086
|
||||
httpCode=$(curl -s -L ${cmd_ext} ${heisenbergCompensator} -w "%{http_code}" -A "${agent}" "${url}" -o "${patternBuffer}" 2> /dev/null)
|
||||
|
||||
case $url in
|
||||
# Did we "download" a local file?
|
||||
"file"*)
|
||||
if [[ -s "${patternBuffer}" ]]; then
|
||||
echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true
|
||||
else
|
||||
echo -e "${OVER} ${CROSS} ${str} Not found / empty list"
|
||||
fi;;
|
||||
# Did we "download" a remote file?
|
||||
"http"*)
|
||||
*)
|
||||
# Determine "Status:" output based on HTTP response
|
||||
case "${httpCode}" in
|
||||
"200") echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true;;
|
||||
@@ -237,26 +530,21 @@ gravity_DownloadBlocklistFromUrl() {
|
||||
"504") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Gateway)";;
|
||||
"521") echo -e "${OVER} ${CROSS} ${str} Web Server Is Down (Cloudflare)";;
|
||||
"522") echo -e "${OVER} ${CROSS} ${str} Connection Timed Out (Cloudflare)";;
|
||||
* ) echo -e "${OVER} ${CROSS} ${str} ${httpCode}";;
|
||||
* ) echo -e "${OVER} ${CROSS} ${str} ${url} (${httpCode})";;
|
||||
esac;;
|
||||
# Did we "download" a local file?
|
||||
"file"*)
|
||||
if [[ -s "${patternBuffer}" ]]; then
|
||||
echo -e "${OVER} ${TICK} ${str} Retrieval successful"; success=true
|
||||
else
|
||||
echo -e "${OVER} ${CROSS} ${str} Not found / empty list"
|
||||
fi;;
|
||||
*) echo -e "${OVER} ${CROSS} ${str} ${url} ${httpCode}";;
|
||||
esac
|
||||
|
||||
# Determine if the blocklist was downloaded and saved correctly
|
||||
if [[ "${success}" == true ]]; then
|
||||
if [[ "${httpCode}" == "304" ]]; then
|
||||
: # Do not attempt to re-parse file
|
||||
# Add domains to database table file
|
||||
parseList "${adlistID}" "${saveLocation}" "${target}"
|
||||
# Check if $patternbuffer is a non-zero length file
|
||||
elif [[ -s "${patternBuffer}" ]]; then
|
||||
# Determine if blocklist is non-standard and parse as appropriate
|
||||
gravity_ParseFileIntoDomains "${patternBuffer}" "${saveLocation}"
|
||||
# Add domains to database table file
|
||||
parseList "${adlistID}" "${saveLocation}" "${target}"
|
||||
else
|
||||
# Fall back to previously cached list if $patternBuffer is empty
|
||||
echo -e " ${INFO} Received empty file: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
|
||||
@@ -265,6 +553,8 @@ gravity_DownloadBlocklistFromUrl() {
|
||||
# Determine if cached list has read permission
|
||||
if [[ -r "${saveLocation}" ]]; then
|
||||
echo -e " ${CROSS} List download failed: ${COL_LIGHT_GREEN}using previously cached list${COL_NC}"
|
||||
# Add domains to database table file
|
||||
parseList "${adlistID}" "${saveLocation}" "${target}"
|
||||
else
|
||||
echo -e " ${CROSS} List download failed: ${COL_LIGHT_RED}no cached list available${COL_NC}"
|
||||
fi
|
||||
@@ -273,24 +563,29 @@ gravity_DownloadBlocklistFromUrl() {
|
||||
|
||||
# Parse source files into domains format
|
||||
gravity_ParseFileIntoDomains() {
|
||||
local source="${1}" destination="${2}" firstLine abpFilter
|
||||
local source="${1}" destination="${2}" firstLine
|
||||
|
||||
# Determine if we are parsing a consolidated list
|
||||
if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
|
||||
#if [[ "${source}" == "${piholeDir}/${matterAndLight}" ]]; then
|
||||
# Remove comments and print only the domain name
|
||||
# Most of the lists downloaded are already in hosts file format but the spacing/formatting is not contiguous
|
||||
# This helps with that and makes it easier to read
|
||||
# It also helps with debugging so each stage of the script can be researched more in depth
|
||||
#Awk -F splits on given IFS, we grab the right hand side (chops trailing #comments and /'s to grab the domain only).
|
||||
#Last awk command takes non-commented lines and if they have 2 fields, take the right field (the domain) and leave
|
||||
#+ the left (IP address), otherwise grab the single field.
|
||||
|
||||
< ${source} awk -F '#' '{print $1}' | \
|
||||
awk -F '/' '{print $1}' | \
|
||||
awk '($1 !~ /^#/) { if (NF>1) {print $2} else {print $1}}' | \
|
||||
sed -nr -e 's/\.{2,}/./g' -e '/\./p' > ${destination}
|
||||
# 1) Remove carriage returns
|
||||
# 2) Convert all characters to lowercase
|
||||
# 3) Remove comments (text starting with "#", include possible spaces before the hash sign)
|
||||
# 4) Remove lines containing "/"
|
||||
# 5) Remove leading tabs, spaces, etc.
|
||||
# 6) Delete lines not matching domain names
|
||||
< "${source}" tr -d '\r' | \
|
||||
tr '[:upper:]' '[:lower:]' | \
|
||||
sed 's/\s*#.*//g' | \
|
||||
sed -r '/(\/).*$/d' | \
|
||||
sed -r 's/^.*\s+//g' | \
|
||||
sed -r '/([^\.]+\.)+[^\.]{2,}/!d' > "${destination}"
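# Editor's note (illustration with assumed input): the pipeline above turns
#   "0.0.0.0  Ads.Example.COM  # tracker"  into  "ads.example.com"
# and drops lines such as "127.0.0.1 localhost/invalid" because they contain "/".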
|
||||
chmod 644 "${destination}"
|
||||
return 0
|
||||
fi
|
||||
#fi
|
||||
|
||||
# Individual file parsing: Keep comments, while parsing domains from each line
|
||||
# We keep comments to respect the list maintainer's licensing
|
||||
@@ -299,46 +594,7 @@ gravity_ParseFileIntoDomains() {
|
||||
# Determine how to parse individual source file formats
|
||||
if [[ "${firstLine,,}" =~ (adblock|ublock|^!) ]]; then
|
||||
# Compare $firstLine against lower case words found in Adblock lists
|
||||
echo -ne " ${INFO} Format: Adblock"
|
||||
|
||||
# Define symbols used as comments: [!
|
||||
# "||.*^" includes the "Example 2" domains we can extract
|
||||
# https://adblockplus.org/filter-cheatsheet
|
||||
abpFilter="/^(\\[|!)|^(\\|\\|.*\\^)/"
|
||||
|
||||
# Parse Adblock lists by extracting "Example 2" domains
|
||||
# Logic: Ignore lines which do not include comments or domain name anchor
|
||||
awk ''"${abpFilter}"' {
|
||||
# Remove valid adblock type options
|
||||
gsub(/\$?~?(important|third-party|popup|subdocument|websocket),?/, "", $0)
|
||||
# Remove starting domain name anchor "||" and ending separator "^"
|
||||
gsub(/^(\|\|)|(\^)/, "", $0)
|
||||
# Remove invalid characters (*/,=$)
|
||||
if($0 ~ /[*\/,=\$]/) { $0="" }
|
||||
# Remove lines which are only IPv4 addresses
|
||||
if($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" }
|
||||
if($0) { print $0 }
|
||||
}' "${source}" > "${destination}"
|
||||
|
||||
# Determine if there are Adblock exception rules
|
||||
# https://adblockplus.org/filters
|
||||
if grep -q "^@@||" "${source}" &> /dev/null; then
|
||||
# Parse Adblock lists by extracting exception rules
|
||||
# Logic: Ignore lines which do not include exception format "@@||example.com^"
|
||||
awk -F "[|^]" '/^@@\|\|.*\^/ {
|
||||
# Remove valid adblock type options
|
||||
gsub(/\$?~?(third-party)/, "", $0)
|
||||
# Remove invalid characters (*/,=$)
|
||||
if($0 ~ /[*\/,=\$]/) { $0="" }
|
||||
if($3) { print $3 }
|
||||
}' "${source}" > "${destination}.exceptionsFile.tmp"
|
||||
|
||||
# Remove exceptions
|
||||
comm -23 "${destination}" <(sort "${destination}.exceptionsFile.tmp") > "${source}"
|
||||
mv "${source}" "${destination}"
|
||||
fi
|
||||
|
||||
echo -e "${OVER} ${TICK} Format: Adblock"
|
||||
echo -e " ${CROSS} Format: Adblock (list type not supported)"
|
||||
elif grep -q "^address=/" "${source}" &> /dev/null; then
|
||||
# Parse Dnsmasq format lists
|
||||
echo -e " ${CROSS} Format: Dnsmasq (list type not supported)"
|
||||
@@ -347,18 +603,25 @@ gravity_ParseFileIntoDomains() {
|
||||
# Scanning for "^IPv4$" is too slow with large (1M) lists on low-end hardware
|
||||
echo -ne " ${INFO} Format: URL"
|
||||
|
||||
awk '{
|
||||
# Remove URL protocol, optional "username:password@", and ":?/;"
|
||||
if ($0 ~ /[:?\/;]/) { gsub(/(^.*:\/\/(.*:.*@)?|[:?\/;].*)/, "", $0) }
|
||||
# Remove lines which are only IPv4 addresses
|
||||
if ($0 ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) { $0="" }
|
||||
if ($0) { print $0 }
|
||||
}' "${source}" 2> /dev/null > "${destination}"
|
||||
awk '
|
||||
# Remove URL scheme, optional "username:password@", and ":?/;"
|
||||
# The scheme must be matched carefully to avoid blocking the wrong URL
|
||||
# in cases like:
|
||||
# http://www.evil.com?http://www.good.com
|
||||
# See RFC 3986 section 3.1 for details.
|
||||
/[:?\/;]/ { gsub(/(^[a-zA-Z][a-zA-Z0-9+.-]*:\/\/(.*:.*@)?|[:?\/;].*)/, "", $0) }
|
||||
# Skip lines which are only IPv4 addresses
|
||||
/^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/ { next }
|
||||
# Print if nonempty
|
||||
length { print }
|
||||
' "${source}" 2> /dev/null > "${destination}"
|
||||
chmod 644 "${destination}"
|
||||
|
||||
echo -e "${OVER} ${TICK} Format: URL"
|
||||
else
|
||||
# Default: Keep hosts/domains file in same format as it was downloaded
|
||||
output=$( { mv "${source}" "${destination}"; } 2>&1 )
|
||||
chmod 644 "${destination}"
|
||||
|
||||
if [[ ! -e "${destination}" ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to move tmp file to ${piholeDir}
|
||||
@@ -368,95 +631,29 @@ gravity_ParseFileIntoDomains() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Create (unfiltered) "Matter and Light" consolidated list
|
||||
gravity_ConsolidateDownloadedBlocklists() {
|
||||
local str lastLine
|
||||
|
||||
str="Consolidating blocklists"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
|
||||
# Empty $matterAndLight if it already exists, otherwise, create it
|
||||
: > "${piholeDir}/${matterAndLight}"
|
||||
|
||||
# Loop through each *.domains file
|
||||
for i in "${activeDomains[@]}"; do
|
||||
# Determine if file has read permissions, as download might have failed
|
||||
if [[ -r "${i}" ]]; then
|
||||
# Remove windows CRs from file, convert list to lower case, and append into $matterAndLight
|
||||
tr -d '\r' < "${i}" | tr '[:upper:]' '[:lower:]' >> "${piholeDir}/${matterAndLight}"
|
||||
|
||||
# Ensure that the first line of a new list is on a new line
|
||||
lastLine=$(tail -1 "${piholeDir}/${matterAndLight}")
|
||||
if [[ "${#lastLine}" -gt 0 ]]; then
|
||||
echo "" >> "${piholeDir}/${matterAndLight}"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
}
|
||||
|
||||
# Parse consolidated list into (filtered, unique) domains-only format
|
||||
gravity_SortAndFilterConsolidatedList() {
|
||||
local str num
|
||||
|
||||
str="Extracting domains from blocklists"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
|
||||
# Parse into hosts file
|
||||
gravity_ParseFileIntoDomains "${piholeDir}/${matterAndLight}" "${piholeDir}/${parsedMatter}"
|
||||
|
||||
# Format $parsedMatter line total as currency
|
||||
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${parsedMatter}")")
|
||||
echo -e "${OVER} ${TICK} ${str}
|
||||
${INFO} Number of domains being pulled in by gravity: ${COL_BLUE}${num}${COL_NC}"
|
||||
|
||||
str="Removing duplicate domains"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
sort -u "${piholeDir}/${parsedMatter}" > "${piholeDir}/${preEventHorizon}"
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
|
||||
# Format $preEventHorizon line total as currency
|
||||
num=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
|
||||
echo -e " ${INFO} Number of unique domains trapped in the Event Horizon: ${COL_BLUE}${num}${COL_NC}"
|
||||
}
|
||||
|
||||
# Whitelist user-defined domains
|
||||
gravity_Whitelist() {
|
||||
local num str
|
||||
|
||||
if [[ ! -f "${whitelistFile}" ]]; then
|
||||
echo -e " ${INFO} Nothing to whitelist!"
|
||||
return 0
|
||||
fi
|
||||
|
||||
num=$(wc -l < "${whitelistFile}")
|
||||
str="Number of whitelisted domains: ${num}"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
|
||||
# Print everything from preEventHorizon into whitelistMatter EXCEPT domains in $whitelistFile
|
||||
comm -23 "${piholeDir}/${preEventHorizon}" <(sort "${whitelistFile}") > "${piholeDir}/${whitelistMatter}"
|
||||
|
||||
echo -e "${OVER} ${INFO} ${str}"
|
||||
}
|
||||
|
||||
# Output count of blacklisted domains and wildcards
|
||||
gravity_ShowBlockCount() {
|
||||
# Report number of entries in a table
|
||||
gravity_Table_Count() {
|
||||
local table="${1}"
|
||||
local str="${2}"
|
||||
local num
|
||||
|
||||
if [[ -f "${blacklistFile}" ]]; then
|
||||
num=$(printf "%'.0f" "$(wc -l < "${blacklistFile}")")
|
||||
echo -e " ${INFO} Number of blacklisted domains: ${num}"
|
||||
num="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(*) FROM ${table};")"
|
||||
if [[ "${table}" == "vw_gravity" ]]; then
|
||||
local unique
|
||||
unique="$(sqlite3 "${gravityDBfile}" "SELECT COUNT(DISTINCT domain) FROM ${table};")"
|
||||
echo -e " ${INFO} Number of ${str}: ${num} (${COL_BOLD}${unique} unique domains${COL_NC})"
|
||||
sqlite3 "${gravityDBfile}" "INSERT OR REPLACE INTO info (property,value) VALUES ('gravity_count',${unique});"
|
||||
else
|
||||
echo -e " ${INFO} Number of ${str}: ${num}"
|
||||
fi
|
||||
}
|
||||
|
||||
if [[ -f "${wildcardFile}" ]]; then
|
||||
num=$(grep -c "^" "${wildcardFile}")
|
||||
# If IPv4 and IPv6 is used, divide total wildcard count by 2
|
||||
if [[ -n "${IPV4_ADDRESS}" ]] && [[ -n "${IPV6_ADDRESS}" ]];then
|
||||
num=$(( num/2 ))
|
||||
fi
|
||||
echo -e " ${INFO} Number of wildcard blocked domains: ${num}"
|
||||
fi
|
||||
# Output count of blacklisted domains and regex filters
|
||||
gravity_ShowCount() {
|
||||
gravity_Table_Count "vw_gravity" "gravity domains" ""
|
||||
gravity_Table_Count "vw_blacklist" "exact blacklisted domains"
|
||||
gravity_Table_Count "vw_regex_blacklist" "regex blacklist filters"
|
||||
gravity_Table_Count "vw_whitelist" "exact whitelisted domains"
|
||||
gravity_Table_Count "vw_regex_whitelist" "regex whitelist filters"
|
||||
}
|
||||
|
||||
# Parse list of domains into hosts format
|
||||
@@ -476,7 +673,7 @@ gravity_ParseDomainsIntoHosts() {
|
||||
}
|
||||
|
||||
# Create "localhost" entries into hosts format
|
||||
gravity_ParseLocalDomains() {
|
||||
gravity_generateLocalList() {
|
||||
local hostname
|
||||
|
||||
if [[ -s "/etc/hostname" ]]; then
|
||||
@@ -492,6 +689,7 @@ gravity_ParseLocalDomains() {
|
||||
|
||||
# Empty $localList if it already exists, otherwise, create it
|
||||
: > "${localList}"
|
||||
chmod 644 "${localList}"
|
||||
|
||||
gravity_ParseDomainsIntoHosts "${localList}.tmp" "${localList}"
|
||||
|
||||
@@ -501,44 +699,6 @@ gravity_ParseLocalDomains() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Create primary blacklist entries
|
||||
gravity_ParseBlacklistDomains() {
|
||||
local output status
|
||||
|
||||
# Empty $accretionDisc if it already exists, otherwise, create it
|
||||
: > "${piholeDir}/${accretionDisc}"
|
||||
|
||||
if [[ -f "${piholeDir}/${whitelistMatter}" ]]; then
|
||||
gravity_ParseDomainsIntoHosts "${piholeDir}/${whitelistMatter}" "${piholeDir}/${accretionDisc}"
|
||||
grep -c "^" "${piholeDir}/${whitelistMatter}" > "${piholeDir}/numBlocked" 2> /dev/null
|
||||
else
|
||||
# There was no whitelist file, so use preEventHorizon instead of whitelistMatter.
|
||||
gravity_ParseDomainsIntoHosts "${piholeDir}/${preEventHorizon}" "${piholeDir}/${accretionDisc}"
|
||||
grep -c "^" "${piholeDir}/${preEventHorizon}" > "${piholeDir}/numBlocked" 2> /dev/null
|
||||
fi
|
||||
|
||||
# Move the file over as /etc/pihole/gravity.list so dnsmasq can use it
|
||||
output=$( { mv "${piholeDir}/${accretionDisc}" "${adList}"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to move ${accretionDisc} from ${piholeDir}\\n ${output}"
|
||||
gravity_Cleanup "error"
|
||||
fi
|
||||
}
|
||||
|
||||
# Create user-added blacklist entries
|
||||
gravity_ParseUserDomains() {
|
||||
if [[ ! -f "${blacklistFile}" ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
gravity_ParseDomainsIntoHosts "${blacklistFile}" "${blackList}.tmp"
|
||||
# Copy the file over as /etc/pihole/black.list so dnsmasq can use it
|
||||
mv "${blackList}.tmp" "${blackList}" 2> /dev/null || \
|
||||
echo -e "\\n ${CROSS} Unable to move ${blackList##*/}.tmp to ${piholeDir}"
|
||||
}
|
||||
|
||||
# Trap Ctrl-C
|
||||
gravity_Trap() {
|
||||
trap '{ echo -e "\\n\\n ${INFO} ${COL_LIGHT_RED}User-abort detected${COL_NC}"; gravity_Cleanup "error"; }' INT
|
||||
@@ -559,7 +719,7 @@ gravity_Cleanup() {
|
||||
# Ensure this function only runs when gravity_SetDownloadOptions() has completed
|
||||
if [[ "${gravity_Blackbody:-}" == true ]]; then
|
||||
# Remove any unused .domains files
|
||||
for file in ${piholeDir}/*.${domainsExtension}; do
|
||||
for file in "${piholeDir}"/*."${domainsExtension}"; do
|
||||
# If list is not in active array, then remove it
|
||||
if [[ ! "${activeDomains[*]}" == *"${file}"* ]]; then
|
||||
rm -f "${file}" 2> /dev/null || \
|
||||
@@ -570,6 +730,21 @@ gravity_Cleanup() {
|
||||
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
|
||||
if ${optimize_database} ; then
|
||||
str="Optimizing domains database"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
# Run VACUUM command on database to optimize it
|
||||
output=$( { sqlite3 "${gravityDBfile}" "VACUUM;"; } 2>&1 )
|
||||
status="$?"
|
||||
|
||||
if [[ "${status}" -ne 0 ]]; then
|
||||
echo -e "\\n ${CROSS} Unable to optimize gravity database ${gravityDBfile}\\n ${output}"
|
||||
error="error"
|
||||
else
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Only restart DNS service if offline
|
||||
if ! pidof ${resolver} &> /dev/null; then
|
||||
"${PIHOLE_COMMAND}" restartdns
|
||||
@@ -596,17 +771,28 @@ Options:
|
||||
for var in "$@"; do
|
||||
case "${var}" in
|
||||
"-f" | "--force" ) forceDelete=true;;
|
||||
"-o" | "--optimize" ) optimize_database=true;;
|
||||
"-r" | "--recreate" ) recreate_database=true;;
|
||||
"-h" | "--help" ) helpFunc;;
|
||||
"-sd" | "--skip-download" ) skipDownload=true;;
|
||||
"-b" | "--blacklist-only" ) listType="blacklist";;
|
||||
"-w" | "--whitelist-only" ) listType="whitelist";;
|
||||
"-wild" | "--wildcard-only" ) listType="wildcard"; dnsRestartType="restart";;
|
||||
esac
|
||||
done
|
||||
|
||||
# Trap Ctrl-C
|
||||
gravity_Trap
|
||||
|
||||
if [[ "${recreate_database:-}" == true ]]; then
|
||||
str="Restoring from migration backup"
|
||||
echo -ne "${INFO} ${str}..."
|
||||
rm "${gravityDBfile}"
|
||||
pushd "${piholeDir}" > /dev/null || exit
|
||||
cp migration_backup/* .
|
||||
popd > /dev/null || exit
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
|
||||
# Move possibly existing legacy files to the gravity database
|
||||
migrate_to_database
|
||||
|
||||
if [[ "${forceDelete:-}" == true ]]; then
|
||||
str="Deleting existing list cache"
|
||||
echo -ne "${INFO} ${str}..."
|
||||
@@ -615,51 +801,32 @@ if [[ "${forceDelete:-}" == true ]]; then
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
fi
|
||||
|
||||
# Determine which functions to run
|
||||
if [[ "${skipDownload}" == false ]]; then
|
||||
# Gravity needs to download blocklists
|
||||
gravity_CheckDNSResolutionAvailable
|
||||
gravity_GetBlocklistUrls
|
||||
gravity_SetDownloadOptions
|
||||
gravity_ConsolidateDownloadedBlocklists
|
||||
gravity_SortAndFilterConsolidatedList
|
||||
else
|
||||
# Gravity needs to modify Blacklist/Whitelist/Wildcards
|
||||
echo -e " ${INFO} Using cached Event Horizon list..."
|
||||
numberOf=$(printf "%'.0f" "$(wc -l < "${piholeDir}/${preEventHorizon}")")
|
||||
echo -e " ${INFO} ${COL_BLUE}${numberOf}${COL_NC} unique domains trapped in the Event Horizon"
|
||||
fi
|
||||
# Gravity downloads blocklists next
|
||||
gravity_CheckDNSResolutionAvailable
|
||||
gravity_DownloadBlocklists
|
||||
|
||||
# Perform when downloading blocklists, or modifying the whitelist
|
||||
if [[ "${skipDownload}" == false ]] || [[ "${listType}" == "whitelist" ]]; then
|
||||
gravity_Whitelist
|
||||
fi
|
||||
# Create local.list
|
||||
gravity_generateLocalList
|
||||
|
||||
gravity_ShowBlockCount
|
||||
# Migrate rest of the data from old to new database
|
||||
gravity_swap_databases
|
||||
|
||||
# Perform when downloading blocklists, or modifying the white/blacklist (not wildcards)
|
||||
if [[ "${skipDownload}" == false ]] || [[ "${listType}" == *"list" ]]; then
|
||||
str="Parsing domains into hosts format"
|
||||
echo -ne " ${INFO} ${str}..."
|
||||
# Update gravity timestamp
|
||||
update_gravity_timestamp
|
||||
|
||||
gravity_ParseUserDomains
|
||||
# Ensure proper permissions are set for the database
|
||||
chown pihole:pihole "${gravityDBfile}"
|
||||
chmod g+w "${piholeDir}" "${gravityDBfile}"
|
||||
|
||||
# Perform when downloading blocklists
|
||||
if [[ ! "${listType:-}" == "blacklist" ]]; then
|
||||
gravity_ParseLocalDomains
|
||||
gravity_ParseBlacklistDomains
|
||||
fi
|
||||
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
|
||||
gravity_Cleanup
|
||||
fi
|
||||
|
||||
echo ""
|
||||
# Compute numbers to be displayed
|
||||
gravity_ShowCount
|
||||
|
||||
# Determine if DNS has been restarted by this instance of gravity
|
||||
if [[ -z "${dnsWasOffline:-}" ]]; then
|
||||
# Use "force-reload" when restarting dnsmasq for everything but Wildcards
|
||||
"${PIHOLE_COMMAND}" restartdns "${dnsRestartType:-force-reload}"
|
||||
"${PIHOLE_COMMAND}" restartdns reload
|
||||
fi
|
||||
|
||||
gravity_Cleanup
|
||||
echo ""
|
||||
|
||||
"${PIHOLE_COMMAND}" status
|
||||
|
112
manpages/pihole-FTL.8
Normal file
@@ -0,0 +1,112 @@
|
||||
.TH "Pihole-FTL" "8" "pihole-FTL" "Pi-hole" "June 2018"
|
||||
.SH "NAME"
|
||||
pihole-FTL - Pi-hole : The Faster-Than-Light (FTL) Engine
|
||||
.br
|
||||
.SH "SYNOPSIS"
|
||||
\fBservice pihole-FTL \fR(\fBstart\fR|\fBstop\fR|\fBrestart\fR)
|
||||
.br
|
||||
|
||||
\fBpihole-FTL debug\fR
|
||||
.br
|
||||
\fBpihole-FTL test\fR
|
||||
.br
|
||||
\fBpihole-FTL -v\fR
|
||||
.br
|
||||
\fBpihole-FTL -t\fR
|
||||
.br
|
||||
\fBpihole-FTL -b\fR
|
||||
.br
|
||||
\fBpihole-FTL -f\fR
|
||||
.br
|
||||
\fBpihole-FTL -h\fR
|
||||
.br
|
||||
\fBpihole-FTL dnsmasq-test\fR
|
||||
.br
|
||||
\fBpihole-FTL --\fR (\fBoptions\fR)
|
||||
.br
|
||||
|
||||
.SH "DESCRIPTION"
|
||||
Pi-hole : The Faster-Than-Light (FTL) Engine is a lightweight, purpose-built daemon used to provide statistics needed for the Pi-hole Web Interface, and its API can be easily integrated into your own projects. Although it is an optional component of the Pi-hole ecosystem, it will be installed by default to provide statistics. As the name implies, FTL does its work \fIvery\fR \fIquickly\fR!
|
||||
.br
|
||||
|
||||
Usage
|
||||
.br
|
||||
|
||||
\fBservice pihole-FTL start\fR
|
||||
.br
|
||||
Start the pihole-FTL daemon
|
||||
.br
|
||||
|
||||
\fBservice pihole-FTL stop\fR
|
||||
.br
|
||||
Stop the pihole-FTL daemon
|
||||
.br
|
||||
|
||||
\fBservice pihole-FTL restart\fR
|
||||
.br
|
||||
If the pihole-FTL daemon is running, stop and then start, otherwise start.
|
||||
.br
|
||||
|
||||
Command line arguments
|
||||
.br
|
||||
|
||||
\fBdebug\fR
|
||||
.br
|
||||
Don't go into daemon mode (stay in foreground) + more verbose logging
|
||||
.br
|
||||
|
||||
\fBtest\fR
|
||||
.br
|
||||
Start FTL and process everything, but shut down immediately afterwards
|
||||
.br
|
||||
|
||||
\fB-v, version\fR
|
||||
.br
|
||||
Don't start FTL, show only version
|
||||
.br
|
||||
|
||||
\fB-t, tag\fR
|
||||
.br
|
||||
Don't start FTL, show only git tag
|
||||
.br
|
||||
|
||||
\fB-b, branch\fR
|
||||
.br
|
||||
Don't start FTL, show only git branch FTL was compiled from
|
||||
.br
|
||||
|
||||
\fB-f, no-daemon\fR
|
||||
.br
|
||||
Don't go into background (daemon mode)
|
||||
.br
|
||||
|
||||
\fB-h, help\fR
|
||||
.br
|
||||
Don't start FTL, show help
|
||||
.br
|
||||
|
||||
\fBdnsmasq-test\fR
|
||||
.br
|
||||
Test resolver config file syntax
|
||||
.br
|
||||
|
||||
\fB--\fR (options)
|
||||
.br
|
||||
Pass options to internal dnsmasq resolver
|
||||
.br
|
||||
.SH "EXAMPLE"
|
||||
Command line arguments can be arbitrarily combined, e.g.:
|
||||
.br
|
||||
|
||||
\fBpihole-FTL debug test\fR
|
||||
.br
|
||||
|
||||
Start FTL in the foreground with more verbose logging, process everything and shut down immediately
|
||||
.br
|
||||
.SH "SEE ALSO"
|
||||
\fBpihole\fR(8), \fBpihole-FTL.conf\fR(5)
|
||||
.br
|
||||
.SH "COLOPHON"
|
||||
|
||||
Get sucked into the latest news and community activity by entering Pi-hole's orbit. Information about Pi-hole, and the latest version of the software can be found at https://pi-hole.net
|
||||
.br
|
104
manpages/pihole-FTL.conf.5
Normal file
@@ -0,0 +1,104 @@
|
||||
.TH "pihole-FTL.conf" "5" "pihole-FTL.conf" "pihole-FTL.conf" "June 2018"
|
||||
.SH "NAME"
|
||||
|
||||
pihole-FTL.conf - FTL's config file
|
||||
.br
|
||||
.SH "DESCRIPTION"
|
||||
|
||||
/etc/pihole/pihole-FTL.conf will be read by \fBpihole-FTL(8)\fR on startup.
|
||||
.br
|
||||
|
||||
\fBSOCKET_LISTENING=localonly|all\fR
|
||||
.br
|
||||
Listen only for local socket connections or permit all connections
|
||||
.br
|
||||
|
||||
\fBQUERY_DISPLAY=yes|no\fR
|
||||
.br
|
||||
Display all queries? Set to no to hide query display
|
||||
.br
|
||||
|
||||
\fBAAAA_QUERY_ANALYSIS=yes|no\fR
|
||||
.br
|
||||
Allow FTL to analyze AAAA queries from pihole.log?
|
||||
.br
|
||||
|
||||
\fBRESOLVE_IPV6=yes|no\fR
|
||||
.br
|
||||
Should FTL try to resolve IPv6 addresses to host names?
|
||||
.br
|
||||
|
||||
\fBRESOLVE_IPV4=yes|no\fR
|
||||
.br
|
||||
Should FTL try to resolve IPv4 addresses to host names?
|
||||
.br
|
||||
|
||||
\fBMAXDBDAYS=365\fR
|
||||
.br
|
||||
How long should queries be stored in the database?
|
||||
.br
|
||||
Setting this to 0 disables the database
|
||||
.br
|
||||
|
||||
\fBDBINTERVAL=1.0\fR
|
||||
.br
|
||||
How often do we store queries in FTL's database [minutes]?
|
||||
.br
|
||||
|
||||
\fBDBFILE=/etc/pihole/pihole-FTL.db\fR
|
||||
.br
|
||||
Specify path and filename of FTL's SQLite long-term database.
|
||||
.br
|
||||
Setting this to DBFILE= disables the database altogether
|
||||
.br
|
||||
|
||||
\fBMAXLOGAGE=24.0\fR
|
||||
.br
|
||||
Up to how many hours of queries should be imported from the database and logs?
|
||||
.br
|
||||
Maximum is 744 (31 days)
|
||||
.br
|
||||
|
||||
\fBFTLPORT=4711\fR
|
||||
.br
|
||||
On which port should FTL be listening?
|
||||
.br
|
||||
|
||||
\fBPRIVACYLEVEL=0|1|2|3|4\fR
|
||||
.br
|
||||
Which privacy level is used?
|
||||
.br
|
||||
0 - show everything
|
||||
.br
|
||||
1 - hide domains
|
||||
.br
|
||||
2 - hide domains and clients
|
||||
.br
|
||||
3 - anonymous mode (hide everything)
|
||||
.br
|
||||
4 - disable all statistics
|
||||
.br
|
||||
|
||||
\fBIGNORE_LOCALHOST=no|yes\fR
|
||||
.br
|
||||
Should FTL ignore queries coming from the local machine?
|
||||
.br
|
||||
|
||||
\fBBLOCKINGMODE=NULL|IP-NODATA-AAAA|IP|NXDOMAIN\fR
|
||||
.br
|
||||
How should FTL reply to blocked queries?
|
||||
.br
|
||||
|
||||
For each setting, the option shown first is the default.
|
||||
.br
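.SH "EXAMPLE"
An illustrative configuration (values chosen by the editor as examples, not recommendations):
.br
MAXDBDAYS=90
.br
PRIVACYLEVEL=0
.br
BLOCKINGMODE=NULL
.br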
|
||||
.SH "SEE ALSO"
|
||||
|
||||
\fBpihole\fR(8), \fBpihole-FTL\fR(8)
|
||||
.br
|
||||
.SH "COLOPHON"
|
||||
|
||||
Pi-hole : The Faster-Than-Light (FTL) Engine is a lightweight, purpose-built daemon used to provide statistics needed for the Pi-hole Web Interface, and its API can be easily integrated into your own projects. Although it is an optional component of the Pi-hole ecosystem, it will be installed by default to provide statistics. As the name implies, FTL does its work \fIvery quickly\fR!
|
||||
.br
|
||||
|
||||
Get sucked into the latest news and community activity by entering Pi-hole's orbit. Information about Pi-hole, and the latest version of the software can be found at https://pi-hole.net
|
||||
.br
|
379
manpages/pihole.8
Normal file
@@ -0,0 +1,379 @@
|
||||
.TH "Pi-hole" "8" "Pi-hole" "Pi-hole" "April 2020"
|
||||
.SH "NAME"
|
||||
|
||||
Pi-hole : A black-hole for internet advertisements
|
||||
.br
|
||||
.SH "SYNOPSIS"
|
||||
|
||||
\fBpihole\fR (\fB-w\fR|\fB-b\fR|\fB--wild\fR|\fB--regex\fR) [options] domain(s)
|
||||
.br
|
||||
\fBpihole -a\fR \fB-p\fR password
|
||||
.br
|
||||
\fBpihole -a\fR (\fB-c|-f|-k\fR)
|
||||
.br
|
||||
\fBpihole -a -e\fR email
|
||||
.br
|
||||
\fBpihole -a -i\fR interface
|
||||
.br
|
||||
\fBpihole -a -l\fR privacylevel
|
||||
.br
|
||||
\fBpihole -c\fR [-j|-r|-e]
|
||||
.br
|
||||
\fBpihole\fR \fB-d\fR [-a]
|
||||
.br
|
||||
\fBpihole -f
|
||||
.br
|
||||
pihole -r
|
||||
.br
|
||||
pihole -t
|
||||
.br
|
||||
pihole -g\fR
|
||||
.br
|
||||
\fBpihole\fR -\fBq\fR [options]
|
||||
.br
|
||||
\fBpihole\fR \fB-l\fR (\fBon|off|off noflush\fR)
|
||||
.br
|
||||
\fBpihole -up \fR[--check-only]
|
||||
.br
|
||||
\fBpihole -v\fR [-p|-a|-f] [-c|-l|-hash]
|
||||
.br
|
||||
\fBpihole uninstall
|
||||
.br
|
||||
pihole status
|
||||
.br
|
||||
pihole restartdns\fR [options]
|
||||
.br
|
||||
\fBpihole\fR (\fBenable\fR|\fBdisable\fR [time])
|
||||
.br
|
||||
\fBpihole\fR \fBcheckout\fR repo [branch]
|
||||
.br
|
||||
\fBpihole\fR \fBhelp\fR
|
||||
.br
|
||||
.SH "DESCRIPTION"
|
||||
|
||||
Available commands and options:
|
||||
.br
|
||||
|
||||
\fB-w, whitelist\fR [options] [<domain1> <domain2 ...>]
|
||||
.br
|
||||
Adds or removes specified domain or domains to the whitelist
|
||||
.br
|
||||
|
||||
\fB-b, blacklist\fR [options] [<domain1> <domain2 ...>]
|
||||
.br
|
||||
Adds or removes specified domain or domains to the blacklist
|
||||
.br
|
||||
|
||||
\fB--regex, regex\fR [options] [<regex1> <regex2 ...>]
|
||||
.br
|
||||
Adds or removes specified regex filter to the regex blacklist
|
||||
.br
|
||||
|
||||
\fB--white-regex\fR [options] [<regex1> <regex2 ...>]
|
||||
.br
|
||||
Adds or removes specified regex filter to the regex whitelist
|
||||
.br
|
||||
|
||||
\fB--wild, wildcard\fR [options] [<domain1> <domain2 ...>]
|
||||
.br
|
||||
Adds or removes specified domain to the wildcard blacklist
|
||||
.br
|
||||
|
||||
\fB--white-wild\fR [options] [<domain1> <domain2 ...>]
|
||||
.br
|
||||
Adds or removes specified domain to the wildcard whitelist
|
||||
.br
|
||||
|
||||
(Whitelist/Blacklist manipulation options):
|
||||
.br
|
||||
-d, --delmode Remove domain(s) from the list
|
||||
.br
|
||||
-nr, --noreload Update list without refreshing dnsmasq
|
||||
.br
|
||||
-q, --quiet Make output less verbose
|
||||
.br
|
||||
-l, --list Display all your listed domains
|
||||
.br
|
||||
--nuke Removes all entries in a list
|
||||
.br
|
||||
|
||||
\fB-d, debug\fR [-a]
|
||||
.br
|
||||
Start a debugging session
|
||||
.br
|
||||
|
||||
-a Enable automated debugging
|
||||
.br
|
||||
|
||||
\fB-f, flush\fR
|
||||
.br
|
||||
Flush the Pi-hole log
|
||||
.br
|
||||
|
||||
\fB-r, reconfigure\fR
|
||||
.br
|
||||
Reconfigure or Repair Pi-hole subsystems
|
||||
.br
|
||||
|
||||
\fB-t, tail\fR
|
||||
.br
|
||||
View the live output of the Pi-hole log
|
||||
.br
|
||||
|
||||
\fB-a, admin\fR [options]
|
||||
.br
|
||||
|
||||
(Admin options):
|
||||
.br
|
||||
-p, password Set Web Interface password
|
||||
.br
|
||||
-c, celsius Set Celsius as preferred temperature unit
|
||||
.br
|
||||
-f, fahrenheit Set Fahrenheit as preferred temperature unit
|
||||
.br
|
||||
-k, kelvin Set Kelvin as preferred temperature unit
|
||||
.br
|
||||
-e, email Set an administrative contact address for the
|
||||
Block Page
|
||||
.br
|
||||
-i, interface Specify dnsmasq's interface listening behavior
|
||||
.br
|
||||
-l, privacylevel <level> Set privacy level
|
||||
(0 = lowest, 4 = highest)
|
||||
.br
|
||||
|
||||
\fB-c, chronometer\fR [options]
|
||||
.br
|
||||
Calculates stats and displays to an LCD
|
||||
.br
|
||||
|
||||
(Chronometer Options):
|
||||
.br
|
||||
-j, --json Output stats as JSON formatted string
|
||||
.br
|
||||
-r, --refresh Set update frequency (in seconds)
|
||||
.br
|
||||
-e, --exit Output stats and exit without refreshing
|
||||
.br
|
||||
|
||||
\fB-g, updateGravity\fR
|
||||
.br
|
||||
Update the list of ad-serving domains
|
||||
.br
|
||||
|
||||
\fB-q, query\fR [option]
|
||||
.br
|
||||
Query the adlists for a specified domain
|
||||
.br
|
||||
|
||||
(Query options):
|
||||
.br
|
||||
-adlist Print the name of the block list URL
|
||||
.br
|
||||
-exact Search the block lists for exact domain matches
|
||||
.br
|
||||
-all Return all query matches within a block list
|
||||
.br
|
||||
|
||||
\fB-h, --help, help\fR
|
||||
.br
|
||||
Show a help dialog
|
||||
.br
|
||||
|
||||
\fB-l, logging\fR [on|off|off noflush]
|
||||
.br
|
||||
Specify whether the Pi-hole log should be used
|
||||
.br
|
||||
|
||||
(Logging options):
|
||||
.br
|
||||
on Enable the Pi-hole log at /var/log/pihole.log
|
||||
.br
|
||||
off Disable and flush the Pi-hole log at
|
||||
/var/log/pihole.log
|
||||
.br
|
||||
off noflush Disable the Pi-hole log at /var/log/pihole.log
|
||||
.br
|
||||
|
||||
\fB-up, updatePihole\fR [--check-only]
|
||||
.br
|
||||
Update Pi-hole subsystems
|
||||
.br
|
||||
|
||||
--check-only Exit script before update is performed.
|
||||
.br
|
||||
|
||||
\fB-v, version\fR [repo] [options]
|
||||
.br
|
||||
Show installed versions of Pi-hole, Web Interface & FTL
|
||||
.br
|
||||
|
||||
.br
|
||||
(repo options):
|
||||
.br
|
||||
-p, --pihole Only retrieve info regarding Pi-hole repository
|
||||
.br
|
||||
-a, --admin Only retrieve info regarding AdminLTE
|
||||
repository
|
||||
.br
|
||||
-f, --ftl Only retrieve info regarding FTL repository
|
||||
.br
|
||||
(version options):
|
||||
.br
|
||||
-c, --current Return the current version
|
||||
.br
|
||||
-l, --latest Return the latest version
|
||||
.br
|
||||
--hash Return the Github hash from your local
|
||||
repositories
|
||||
.br
|
||||
|
||||
\fBuninstall\fR
|
||||
.br
|
||||
Uninstall Pi-hole from your system
|
||||
.br
|
||||
|
||||
\fBstatus\fR
|
||||
.br
|
||||
Display the running status of Pi-hole subsystems
|
||||
.br
|
||||
|
||||
\fBenable\fR
|
||||
.br
|
||||
Enable Pi-hole subsystems
|
||||
.br
|
||||
|
||||
\fBdisable\fR [time]
|
||||
.br
|
||||
Disable Pi-hole subsystems, optionally for a set duration
|
||||
.br
|
||||
|
||||
(time options):
|
||||
.br
|
||||
#s Disable Pi-hole functionality for # second(s)
|
||||
.br
|
||||
#m Disable Pi-hole functionality for # minute(s)
|
||||
.br
|
||||
|
||||
\fBrestartdns\fR [options]
|
||||
.br
|
||||
Fully restart Pi-hole subsystems
|
||||
.br
|
||||
|
||||
(restart options):
|
||||
.br
|
||||
reload Updates the lists and flushes DNS cache
|
||||
.br
|
||||
reload-lists Updates the lists WITHOUT flushing the DNS cache
|
||||
.br
|
||||
|
||||
\fBcheckout\fR [repo] [branch]
|
||||
.br
|
||||
Switch Pi-hole subsystems to a different Github branch
|
||||
.br
|
||||
|
||||
(repo options):
|
||||
.br
|
||||
core Change the branch of Pi-hole's core subsystem
|
||||
.br
|
||||
web Change the branch of Admin Console subsystem
|
||||
.br
|
||||
ftl Change the branch of Pi-hole's FTL subsystem
|
||||
.br
|
||||
(branch options):
|
||||
.br
|
||||
master Update subsystems to the latest stable release
|
||||
.br
|
||||
dev Update subsystems to the latest development
|
||||
release
|
||||
.br
|
||||
branchname Update subsystems to the specified branchname
|
||||
.br
|
||||
.SH "EXAMPLE"
|
||||
|
||||
Some usage examples
|
||||
.br
|
||||
|
||||
Whitelist/blacklist manipulation
|
||||
.br
|
||||
|
||||
\fBpihole -w iloveads.example.com\fR
|
||||
.br
|
||||
Adds "iloveads.example.com" to whitelist
|
||||
.br
|
||||
|
||||
\fBpihole -b -d noads.example.com\fR
|
||||
.br
|
||||
Removes "noads.example.com" from blacklist
|
||||
.br
|
||||
|
||||
\fBpihole --wild example.com\fR
|
||||
.br
|
||||
Adds example.com as a wildcard - would block all subdomains of
|
||||
example.com, including example.com itself.
|
||||
.br
|
||||
|
||||
\fBpihole --regex "ad.*\\.example\\.com$"\fR
|
||||
.br
|
||||
Adds "ad.*\\.example\\.com$" to the regex blacklist.
|
||||
Would block all subdomains of example.com which start with "ad"
|
||||
.br
|
||||
|
||||
Changing the Web Interface password
|
||||
.br
|
||||
|
||||
\fBpihole -a -p ExamplePassword\fR
|
||||
.br
|
||||
Change the password to "ExamplePassword"
|
||||
.br
|
||||
|
||||
Updating lists from internet sources
|
||||
.br
|
||||
|
||||
\fBpihole -g\fR
|
||||
.br
|
||||
Update the list of ad-serving domains
|
||||
.br
|
||||
|
||||
Displaying version information
|
||||
.br
|
||||
|
||||
\fBpihole -v -a -c\fR
|
||||
.br
|
||||
Display the current version of AdminLTE
|
||||
.br
|
||||
|
||||
Temporarily disabling Pi-hole
|
||||
.br
|
||||
|
||||
\fBpihole disable 5m\fR
|
||||
.br
|
||||
Disable Pi-hole functionality for five minutes
|
||||
.br
|
||||
|
||||
Switching Pi-hole subsystem branches
|
||||
.br
|
||||
|
||||
\fBpihole checkout master\fR
|
||||
.br
|
||||
Switch to master branch
|
||||
.br
|
||||
|
||||
\fBpihole checkout core dev\fR
|
||||
.br
|
||||
Switch to core development branch
|
||||
.br
|
||||
|
||||
\fBpihole arpflush\fR
|
||||
.br
|
||||
Flush information stored in Pi-hole's network tables
|
||||
.br
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
\fBlighttpd\fR(8), \fBpihole-FTL\fR(8)
|
||||
.br
|
||||
.SH "COLOPHON"
|
||||
|
||||
Get sucked into the latest news and community activity by entering Pi-hole's orbit. Information about Pi-hole, and the latest version of the software can be found at https://pi-hole.net.
|
||||
.br
|
402
pihole
@@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Pi-hole: A black hole for Internet advertisements
|
||||
# (c) 2017 Pi-hole, LLC (https://pi-hole.net)
|
||||
@@ -10,40 +10,25 @@
|
||||
# Please see LICENSE file for your rights under this license.
|
||||
|
||||
readonly PI_HOLE_SCRIPT_DIR="/opt/pihole"
|
||||
readonly wildcardlist="/etc/dnsmasq.d/03-pihole-wildcard.conf"
|
||||
|
||||
# setupVars and PI_HOLE_BIN_DIR are not readonly here because in some functions (checkout),
|
||||
# they might get set again when the installer is sourced. This causes an
|
||||
# error due to modifying a readonly variable.
|
||||
setupVars="/etc/pihole/setupVars.conf"
|
||||
PI_HOLE_BIN_DIR="/usr/local/bin"
|
||||
|
||||
readonly colfile="${PI_HOLE_SCRIPT_DIR}/COL_TABLE"
|
||||
source "${colfile}"
|
||||
|
||||
resolver="pihole-FTL"
|
||||
|
||||
# Must be root to use this tool
|
||||
if [[ ! $EUID -eq 0 ]];then
|
||||
if [[ -x "$(command -v sudo)" ]]; then
|
||||
exec sudo bash "$0" "$@"
|
||||
exit $?
|
||||
else
|
||||
echo -e " ${CROSS} sudo is needed to run pihole commands. Please run this script as root or install sudo."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
webpageFunc() {
|
||||
source "${PI_HOLE_SCRIPT_DIR}/webpage.sh"
|
||||
main "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
whitelistFunc() {
|
||||
"${PI_HOLE_SCRIPT_DIR}"/list.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
blacklistFunc() {
|
||||
"${PI_HOLE_SCRIPT_DIR}"/list.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
wildcardFunc() {
|
||||
listFunc() {
|
||||
"${PI_HOLE_SCRIPT_DIR}"/list.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
@@ -70,8 +55,14 @@ flushFunc() {
|
||||
exit 0
|
||||
}
|
||||
|
||||
arpFunc() {
|
||||
"${PI_HOLE_SCRIPT_DIR}"/piholeARPTable.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
updatePiholeFunc() {
|
||||
"${PI_HOLE_SCRIPT_DIR}"/update.sh
|
||||
shift
|
||||
"${PI_HOLE_SCRIPT_DIR}"/update.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
@@ -82,233 +73,12 @@ reconfigurePiholeFunc() {
|
||||
|
||||
updateGravityFunc() {
|
||||
"${PI_HOLE_SCRIPT_DIR}"/gravity.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Scan an array of files for matching strings
|
||||
scanList(){
|
||||
# Escape full stops
|
||||
local domain="${1//./\\.}" lists="${2}" type="${3:-}"
|
||||
|
||||
# Prevent grep from printing file path
|
||||
cd "/etc/pihole" || exit 1
|
||||
|
||||
# Prevent grep -i matching slowly: http://bit.ly/2xFXtUX
|
||||
export LC_CTYPE=C
|
||||
|
||||
# /dev/null forces filename to be printed when only one list has been generated
|
||||
# shellcheck disable=SC2086
|
||||
case "${type}" in
|
||||
"exact" ) grep -i -E -l "(^|\\s)${domain}($|\\s|#)" ${lists} /dev/null;;
|
||||
"wc" ) grep -i -o -m 1 "/${domain}/" ${lists};;
|
||||
* ) grep -i "${domain}" ${lists} /dev/null;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Print each subdomain
|
||||
# e.g: foo.bar.baz.com = "foo.bar.baz.com bar.baz.com baz.com com"
|
||||
processWildcards() {
|
||||
IFS="." read -r -a array <<< "${1}"
|
||||
for (( i=${#array[@]}-1; i>=0; i-- )); do
|
||||
ar=""
|
||||
for (( j=${#array[@]}-1; j>${#array[@]}-i-2; j-- )); do
|
||||
if [[ $j == $((${#array[@]}-1)) ]]; then
|
||||
ar="${array[$j]}"
|
||||
else
|
||||
ar="${array[$j]}.${ar}"
|
||||
fi
|
||||
done
|
||||
echo "${ar}"
|
||||
done
|
||||
exit $?
|
||||
}
|
||||
|
||||
queryFunc() {
|
||||
shift
|
||||
local options="$*" adlist="" all="" exact="" blockpage="" matchType="match"
|
||||
|
||||
if [[ "${options}" == "-h" ]] || [[ "${options}" == "--help" ]]; then
|
||||
echo "Usage: pihole -q [option] <domain>
|
||||
Example: 'pihole -q -exact domain.com'
|
||||
Query the adlists for a specified domain
|
||||
|
||||
Options:
|
||||
-adlist Print the name of the block list URL
|
||||
-exact Search the block lists for exact domain matches
|
||||
-all Return all query matches within a block list
|
||||
-h, --help Show this help dialog"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ ! -e "/etc/pihole/adlists.list" ]]; then
|
||||
echo -e "${COL_LIGHT_RED}The file '/etc/pihole/adlists.list' was not found${COL_NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Handle valid options
|
||||
if [[ "${options}" == *"-bp"* ]]; then
|
||||
exact="exact"; blockpage=true
|
||||
else
|
||||
[[ "${options}" == *"-adlist"* ]] && adlist=true
|
||||
[[ "${options}" == *"-all"* ]] && all=true
|
||||
if [[ "${options}" == *"-exact"* ]]; then
|
||||
exact="exact"; matchType="exact ${matchType}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Strip valid options, leaving only the domain and invalid options
|
||||
# This allows users to place the options before or after the domain
|
||||
options=$(sed -E 's/ ?-(bp|adlists?|all|exact) ?//g' <<< "${options}")
|
||||
|
||||
# Handle remaining options
|
||||
# If $options contain non ASCII characters, convert to punycode
|
||||
case "${options}" in
|
||||
"" ) str="No domain specified";;
|
||||
*" "* ) str="Unknown query option specified";;
|
||||
*[![:ascii:]]* ) domainQuery=$(idn2 "${options}");;
|
||||
* ) domainQuery="${options}";;
|
||||
esac
|
||||
|
||||
if [[ -n "${str:-}" ]]; then
|
||||
echo -e "${str}${COL_NC}\\nTry 'pihole -q --help' for more information."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Scan Whitelist and Blacklist
|
||||
lists="whitelist.txt blacklist.txt"
|
||||
mapfile -t results <<< "$(scanList "${domainQuery}" "${lists}" "${exact}")"
|
||||
|
||||
if [[ -n "${results[*]}" ]]; then
|
||||
wbMatch=true
|
||||
|
||||
# Loop through each result in order to print unique file title once
|
||||
for result in "${results[@]}"; do
|
||||
fileName="${result%%.*}"
|
||||
|
||||
if [[ -n "${blockpage}" ]]; then
|
||||
echo "π ${result}"
|
||||
exit 0
|
||||
elif [[ -n "${exact}" ]]; then
|
||||
echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}"
|
||||
else
|
||||
# Only print filename title once per file
|
||||
if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then
|
||||
echo " ${matchType^} found in ${COL_BOLD}${fileName^}${COL_NC}"
|
||||
fileName_prev="${fileName}"
|
||||
fi
|
||||
echo " ${result#*:}"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Scan Wildcards
|
||||
if [[ -e "${wildcardlist}" ]]; then
|
||||
# Determine all subdomains, domain and TLDs
|
||||
mapfile -t wildcards <<< "$(processWildcards "${domainQuery}")"
|
||||
|
||||
for match in "${wildcards[@]}"; do
|
||||
# Search wildcard list for matches
|
||||
mapfile -t results <<< "$(scanList "${match}" "${wildcardlist}" "wc")"
|
||||
|
||||
if [[ -n "${results[*]}" ]]; then
|
||||
if [[ -z "${wcMatch:-}" ]] && [[ -z "${blockpage}" ]]; then
|
||||
wcMatch=true
|
||||
echo " ${matchType^} found in ${COL_BOLD}Wildcards${COL_NC}:"
|
||||
fi
|
||||
|
||||
case "${blockpage}" in
|
||||
true ) echo "π ${wildcardlist##*/}"; exit 0;;
|
||||
* ) echo " *.${match}";;
|
||||
esac
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Get version sorted *.domains filenames (without dir path)
|
||||
lists=("$(cd "/etc/pihole" || exit 0; printf "%s\\n" -- *.domains | sort -V)")
|
||||
|
||||
# Query blocklists for occurences of domain
|
||||
mapfile -t results <<< "$(scanList "${domainQuery}" "${lists[*]}" "${exact}")"
|
||||
|
||||
# Handle notices
|
||||
if [[ -z "${wbMatch:-}" ]] && [[ -z "${wcMatch:-}" ]] && [[ -z "${results[*]}" ]]; then
|
||||
echo -e " ${INFO} No ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC} found within block lists"
|
||||
exit 0
|
||||
elif [[ -z "${results[*]}" ]]; then
|
||||
# Result found in WL/BL/Wildcards
|
||||
exit 0
|
||||
elif [[ -z "${all}" ]] && [[ "${#results[*]}" -ge 100 ]]; then
|
||||
echo -e " ${INFO} Over 100 ${exact/t/t }results found for ${COL_BOLD}${domainQuery}${COL_NC}
|
||||
This can be overridden using the -all option"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Remove unwanted content from non-exact $results
|
||||
if [[ -z "${exact}" ]]; then
|
||||
# Delete lines starting with #
|
||||
# Remove comments after domain
|
||||
# Remove hosts format IP address
|
||||
mapfile -t results <<< "$(IFS=$'\n'; sed \
|
||||
-e "/:#/d" \
|
||||
-e "s/[ \\t]#.*//g" \
|
||||
-e "s/:.*[ \\t]/:/g" \
|
||||
<<< "${results[*]}")"
|
||||
|
||||
# Exit if result was in a comment
|
||||
[[ -z "${results[*]}" ]] && exit 0
|
||||
fi
|
||||
|
||||
# Get adlist file content as array
|
||||
if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then
|
||||
for adlistUrl in $(< "/etc/pihole/adlists.list"); do
|
||||
if [[ "${adlistUrl:0:4}" =~ (http|www.) ]]; then
|
||||
adlists+=("${adlistUrl}")
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Print "Exact matches for" title
|
||||
if [[ -n "${exact}" ]] && [[ -z "${blockpage}" ]]; then
|
||||
plural=""; [[ "${#results[*]}" -gt 1 ]] && plural="es"
|
||||
echo " ${matchType^}${plural} for ${COL_BOLD}${domainQuery}${COL_NC} found in:"
|
||||
fi
|
||||
|
||||
for result in "${results[@]}"; do
|
||||
fileName="${result/:*/}"
|
||||
|
||||
# Determine *.domains URL using filename's number
|
||||
if [[ -n "${adlist}" ]] || [[ -n "${blockpage}" ]]; then
|
||||
fileNum="${fileName/list./}"; fileNum="${fileNum%%.*}"
|
||||
fileName="${adlists[$fileNum]}"
|
||||
|
||||
# Discrepency occurs when adlists has been modified, but Gravity has not been run
|
||||
if [[ -z "${fileName}" ]]; then
|
||||
fileName="${COL_LIGHT_RED}(no associated adlists URL found)${COL_NC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${blockpage}" ]]; then
|
||||
echo "${fileNum} ${fileName}"
|
||||
elif [[ -n "${exact}" ]]; then
|
||||
echo " ${fileName}"
|
||||
else
|
||||
if [[ ! "${fileName}" == "${fileName_prev:-}" ]]; then
|
||||
count=""
|
||||
echo " ${matchType^} found in ${COL_BOLD}${fileName}${COL_NC}:"
|
||||
fileName_prev="${fileName}"
|
||||
fi
|
||||
: $((count++))
|
||||
|
||||
# Print matching domain if $max_count has not been reached
|
||||
[[ -z "${all}" ]] && max_count="50"
|
||||
if [[ -z "${all}" ]] && [[ "${count}" -ge "${max_count}" ]]; then
|
||||
[[ "${count}" -gt "${max_count}" ]] && continue
|
||||
echo " ${COL_GRAY}Over ${count} results found, skipping rest of file${COL_NC}"
|
||||
else
|
||||
echo " ${result#*:}"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
"${PI_HOLE_SCRIPT_DIR}"/query.sh "$@"
|
||||
exit 0
|
||||
}
|
||||
|
||||
@@ -332,24 +102,28 @@ versionFunc() {
|
||||
|
||||
restartDNS() {
|
||||
local svcOption svc str output status
|
||||
svcOption="${1:-}"
|
||||
svcOption="${1:-restart}"
|
||||
|
||||
# Determine if we should reload or restart restart
|
||||
if [[ "${svcOption}" =~ "reload" ]]; then
|
||||
# Using SIGHUP will NOT re-read any *.conf files
|
||||
# Determine if we should reload or restart
|
||||
if [[ "${svcOption}" =~ "reload-lists" ]]; then
|
||||
# Reloading of the lists has been requested
|
||||
# Note: This will NOT re-read any *.conf files
|
||||
# Note 2: We cannot use killall here as it does
|
||||
# not know about real-time signals
|
||||
svc="kill -SIGRTMIN $(pidof ${resolver})"
|
||||
str="Reloading DNS lists"
|
||||
elif [[ "${svcOption}" =~ "reload" ]]; then
|
||||
# Reloading of the DNS cache has been requested
|
||||
# Note: This will NOT re-read any *.conf files
|
||||
svc="killall -s SIGHUP ${resolver}"
|
||||
str="Flushing DNS cache"
|
||||
else
|
||||
# Get PID of resolver to determine if it needs to start or restart
|
||||
if pidof pihole-FTL &> /dev/null; then
|
||||
svcOption="restart"
|
||||
else
|
||||
svcOption="start"
|
||||
fi
|
||||
svc="service ${resolver} ${svcOption}"
|
||||
# A full restart has been requested
|
||||
svc="service ${resolver} restart"
|
||||
str="Restarting DNS server"
|
||||
fi
|
||||
|
||||
# Print output to Terminal, but not to Web Admin
|
||||
str="${svcOption^}ing DNS service"
|
||||
[[ -t 1 ]] && echo -ne " ${INFO} ${str}..."
|
||||
|
||||
output=$( { ${svc}; } 2>&1 )
|
||||
@@ -357,9 +131,11 @@ restartDNS() {
|
||||
|
||||
if [[ "${status}" -eq 0 ]]; then
|
||||
[[ -t 1 ]] && echo -e "${OVER} ${TICK} ${str}"
|
||||
return 0
|
||||
else
|
||||
[[ ! -t 1 ]] && local OVER=""
|
||||
echo -e "${OVER} ${CROSS} ${output}"
|
||||
return 1
|
||||
fi
|
||||
}
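For reference, a hedged sketch of the two lighter-weight paths the reworked restartDNS distinguishes above, assuming pihole-FTL is the running resolver; these are the same commands the function assembles into ${svc}:

```bash
# 'reload-lists': re-read the lists only; the DNS cache and *.conf files are untouched
kill -SIGRTMIN "$(pidof pihole-FTL)"

# 'reload': flush the DNS cache via SIGHUP; *.conf files are still not re-read
killall -s SIGHUP pihole-FTL
```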
|
||||
|
||||
@@ -376,10 +152,9 @@ Time:
|
||||
|
||||
elif [[ "${1}" == "0" ]]; then
|
||||
# Disable Pi-hole
|
||||
sed -i 's/^addn-hosts=\/etc\/pihole\/gravity.list/#addn-hosts=\/etc\/pihole\/gravity.list/' /etc/dnsmasq.d/01-pihole.conf
|
||||
sed -i 's/^addn-hosts=\/etc\/pihole\/black.list/#addn-hosts=\/etc\/pihole\/black.list/' /etc/dnsmasq.d/01-pihole.conf
|
||||
if [[ -e "$wildcardlist" ]]; then
|
||||
mv "$wildcardlist" "/etc/pihole/wildcard.list"
|
||||
if grep -cq "BLOCKING_ENABLED=false" "${setupVars}"; then
|
||||
echo -e " ${INFO} Blocking already disabled, nothing to do"
|
||||
exit 0
|
||||
fi
|
||||
if [[ $# > 1 ]]; then
|
||||
local error=false
|
||||
@@ -389,7 +164,7 @@ Time:
|
||||
local str="Disabling blocking for ${tt} seconds"
|
||||
echo -e " ${INFO} ${str}..."
|
||||
local str="Blocking will be re-enabled in ${tt} seconds"
|
||||
nohup bash -c "sleep ${tt}; pihole enable" </dev/null &>/dev/null &
|
||||
nohup bash -c "sleep ${tt}; ${PI_HOLE_BIN_DIR}/pihole enable" </dev/null &>/dev/null &
|
||||
else
|
||||
local error=true
|
||||
fi
|
||||
@@ -400,7 +175,7 @@ Time:
|
||||
echo -e " ${INFO} ${str}..."
|
||||
local str="Blocking will be re-enabled in ${tt} minutes"
|
||||
tt=$((${tt}*60))
|
||||
nohup bash -c "sleep ${tt}; pihole enable" </dev/null &>/dev/null &
|
||||
nohup bash -c "sleep ${tt}; ${PI_HOLE_BIN_DIR}/pihole enable" </dev/null &>/dev/null &
|
||||
else
|
||||
local error=true
|
||||
fi
|
||||
@@ -417,19 +192,23 @@ Time:
|
||||
fi
|
||||
|
||||
local str="Pi-hole Disabled"
|
||||
sed -i "/BLOCKING_ENABLED=/d" "${setupVars}"
|
||||
echo "BLOCKING_ENABLED=false" >> "${setupVars}"
|
||||
fi
|
||||
else
|
||||
# Enable Pi-hole
|
||||
if grep -cq "BLOCKING_ENABLED=true" "${setupVars}"; then
|
||||
echo -e " ${INFO} Blocking already enabled, nothing to do"
|
||||
exit 0
|
||||
fi
|
||||
echo -e " ${INFO} Enabling blocking"
|
||||
local str="Pi-hole Enabled"
|
||||
|
||||
sed -i 's/^#addn-hosts/addn-hosts/' /etc/dnsmasq.d/01-pihole.conf
|
||||
if [[ -e "/etc/pihole/wildcard.list" ]]; then
|
||||
mv "/etc/pihole/wildcard.list" "$wildcardlist"
|
||||
fi
|
||||
sed -i "/BLOCKING_ENABLED=/d" "${setupVars}"
|
||||
echo "BLOCKING_ENABLED=true" >> "${setupVars}"
|
||||
fi
|
||||
|
||||
restartDNS
|
||||
restartDNS reload
|
||||
|
||||
echo -e "${OVER} ${TICK} ${str}"
|
||||
}
|
||||
@@ -452,7 +231,7 @@ Options:
|
||||
sed -i 's/^QUERY_LOGGING=true/QUERY_LOGGING=false/' /etc/pihole/setupVars.conf
|
||||
if [[ "${2}" != "noflush" ]]; then
|
||||
# Flush logs
|
||||
pihole -f
|
||||
"${PI_HOLE_BIN_DIR}"/pihole -f
|
||||
fi
|
||||
echo -e " ${INFO} Disabling logging..."
|
||||
local str="Logging has been disabled!"
|
||||
@@ -472,8 +251,6 @@ Options:
|
||||
}
|
||||
|
||||
statusFunc() {
|
||||
local addnConfigs
|
||||
|
||||
# Determine if service is running on port 53 (Cr: https://superuser.com/a/806331)
|
||||
if (echo > /dev/tcp/127.0.0.1/53) >/dev/null 2>&1; then
|
||||
if [[ "${1}" != "web" ]]; then
|
||||
@@ -487,16 +264,14 @@ statusFunc() {
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Determine if Pi-hole's addn-hosts configs are commented out
|
||||
addnConfigs=$(grep -i "addn-hosts=/" /etc/dnsmasq.d/01-pihole.conf)
|
||||
|
||||
if [[ "${addnConfigs}" =~ "#" ]]; then
|
||||
# Determine if Pi-hole's blocking is enabled
|
||||
if grep -q "BLOCKING_ENABLED=false" /etc/pihole/setupVars.conf; then
|
||||
# A config is commented out
|
||||
case "${1}" in
|
||||
"web") echo 0;;
|
||||
*) echo -e " ${CROSS} Pi-hole blocking is Disabled";;
|
||||
esac
|
||||
elif [[ -n "${addnConfigs}" ]]; then
|
||||
elif grep -q "BLOCKING_ENABLED=true" /etc/pihole/setupVars.conf; then
|
||||
# Configs are set
|
||||
case "${1}" in
|
||||
"web") echo 1;;
|
||||
@@ -506,15 +281,21 @@ statusFunc() {
|
||||
# No configs were found
|
||||
case "${1}" in
|
||||
"web") echo 99;;
|
||||
*) echo -e " ${INFO} No hosts file linked to dnsmasq, adding it in enabled state";;
|
||||
*) echo -e " ${INFO} Pi-hole blocking will be enabled";;
|
||||
esac
|
||||
# Add addn-host= to dnsmasq
|
||||
echo "addn-hosts=/etc/pihole/gravity.list" >> /etc/dnsmasq.d/01-pihole.conf
|
||||
restartDNS
|
||||
# Enable blocking
|
||||
"${PI_HOLE_BIN_DIR}"/pihole enable
|
||||
fi
|
||||
}
|
||||
|
||||
tailFunc() {
|
||||
# Warn user if Pi-hole's logging is disabled
|
||||
local logging_enabled=$(grep -c "^log-queries" /etc/dnsmasq.d/01-pihole.conf)
|
||||
if [[ "${logging_enabled}" == "0" ]]; then
|
||||
# No "log-queries" lines are found.
|
||||
# Commented out lines (such as "#log-queries") are ignored
|
||||
echo " ${CROSS} Warning: Query logging is disabled"
|
||||
fi
|
||||
echo -e " ${INFO} Press Ctrl-C to exit"
|
||||
|
||||
# Retrieve IPv4/6 addresses
|
||||
@@ -525,8 +306,8 @@ tailFunc() {
|
||||
# Colour A/AAAA/DHCP strings as white
|
||||
# Colour everything else as gray
|
||||
tail -f /var/log/pihole.log | sed -E \
|
||||
-e "s,($(date +'%b %d ')| dnsmasq[.*[0-9]]),,g" \
|
||||
-e "s,(.*(gravity.list|black.list| config ).* is (${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \
|
||||
-e "s,($(date +'%b %d ')| dnsmasq\[[0-9]*\]),,g" \
|
||||
-e "s,(.*(blacklisted |gravity blocked ).* is (0.0.0.0|::|NXDOMAIN|${IPV4_ADDRESS%/*}|${IPV6_ADDRESS:-NULL}).*),${COL_RED}&${COL_NC}," \
|
||||
-e "s,.*(query\\[A|DHCP).*,${COL_NC}&${COL_NC}," \
|
||||
-e "s,.*,${COL_GRAY}&${COL_NC},"
|
||||
exit 0
|
||||
@@ -540,12 +321,13 @@ Switch Pi-hole subsystems to a different Github branch
|
||||
|
||||
Repositories:
|
||||
core [branch] Change the branch of Pi-hole's core subsystem
|
||||
web [branch] Change the branch of Admin Console subsystem
|
||||
web [branch] Change the branch of Web Interface subsystem
|
||||
ftl [branch] Change the branch of Pi-hole's FTL subsystem
|
||||
|
||||
Branches:
|
||||
master Update subsystems to the latest stable release
|
||||
dev Update subsystems to the latest development release"
|
||||
dev Update subsystems to the latest development release
|
||||
branchname Update subsystems to the specified branchname"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
@@ -598,7 +380,10 @@ Add '-h' after specific commands for more information on usage
|
||||
Whitelist/Blacklist Options:
|
||||
-w, whitelist Whitelist domain(s)
|
||||
-b, blacklist Blacklist domain(s)
|
||||
-wild, wildcard Blacklist domain(s), and all its subdomains
|
||||
--regex, regex Regex blacklist domains(s)
|
||||
--white-regex Regex whitelist domains(s)
|
||||
--wild, wildcard Wildcard blacklist domain(s)
|
||||
--white-wild Wildcard whitelist domain(s)
|
||||
Add '-h' for more info on whitelist/blacklist usage
|
||||
|
||||
Debugging Options:
|
||||
@@ -609,8 +394,8 @@ Debugging Options:
|
||||
-t, tail View the live output of the Pi-hole log
|
||||
|
||||
Options:
|
||||
-a, admin Admin Console options
|
||||
Add '-h' for more info on admin console usage
|
||||
-a, admin Web interface options
|
||||
Add '-h' for more info on Web Interface usage
|
||||
-c, chronometer Calculates stats and displays to an LCD
|
||||
Add '-h' for more info on chronometer usage
|
||||
-g, updateGravity Update the list of ad-serving domains
|
||||
@@ -620,16 +405,20 @@ Options:
|
||||
-q, query Query the adlists for a specified domain
|
||||
Add '-h' for more info on query usage
|
||||
-up, updatePihole Update Pi-hole subsystems
|
||||
-v, version Show installed versions of Pi-hole, Admin Console & FTL
|
||||
Add '--check-only' to exit script before update is performed.
|
||||
-v, version Show installed versions of Pi-hole, Web Interface & FTL
|
||||
Add '-h' for more info on version usage
|
||||
uninstall Uninstall Pi-hole from your system
|
||||
status Display the running status of Pi-hole subsystems
|
||||
enable Enable Pi-hole subsystems
|
||||
disable Disable Pi-hole subsystems
|
||||
Add '-h' for more info on disable usage
|
||||
restartdns Restart Pi-hole subsystems
|
||||
restartdns Full restart Pi-hole subsystems
|
||||
Add 'reload' to update the lists and flush the cache without restarting the DNS server
|
||||
Add 'reload-lists' to only update the lists WITHOUT flushing the cache or restarting the DNS server
|
||||
checkout Switch Pi-hole subsystems to a different Github branch
|
||||
Add '-h' for more info on checkout usage";
|
||||
Add '-h' for more info on checkout usage
|
||||
arpflush Flush information stored in Pi-hole's network tables";
|
||||
exit 0
|
||||
}
|
||||
|
||||
@@ -637,14 +426,32 @@ if [[ $# = 0 ]]; then
|
||||
helpFunc
|
||||
fi
|
||||
|
||||
case "${1}" in
|
||||
"-h" | "help" | "--help" ) helpFunc;;
|
||||
esac
|
||||
|
||||
# Must be root to use this tool
|
||||
if [[ ! $EUID -eq 0 ]];then
|
||||
if [[ -x "$(command -v sudo)" ]]; then
|
||||
exec sudo bash "$0" "$@"
|
||||
exit $?
|
||||
else
|
||||
echo -e " ${CROSS} sudo is needed to run pihole commands. Please run this script as root or install sudo."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Handle redirecting to specific functions based on arguments
|
||||
case "${1}" in
|
||||
"-w" | "whitelist" ) whitelistFunc "$@";;
|
||||
"-b" | "blacklist" ) blacklistFunc "$@";;
|
||||
"-wild" | "wildcard" ) wildcardFunc "$@";;
|
||||
"-w" | "whitelist" ) listFunc "$@";;
|
||||
"-b" | "blacklist" ) listFunc "$@";;
|
||||
"--wild" | "wildcard" ) listFunc "$@";;
|
||||
"--regex" | "regex" ) listFunc "$@";;
|
||||
"--white-regex" | "white-regex" ) listFunc "$@";;
|
||||
"--white-wild" | "white-wild" ) listFunc "$@";;
|
||||
"-d" | "debug" ) debugFunc "$@";;
|
||||
"-f" | "flush" ) flushFunc "$@";;
|
||||
"-up" | "updatePihole" ) updatePiholeFunc;;
|
||||
"-up" | "updatePihole" ) updatePiholeFunc "$@";;
|
||||
"-r" | "reconfigure" ) reconfigurePiholeFunc;;
|
||||
"-g" | "updateGravity" ) updateGravityFunc "$@";;
|
||||
"-c" | "chronometer" ) chronometerFunc "$@";;
|
||||
@@ -662,5 +469,6 @@ case "${1}" in
|
||||
"checkout" ) piholeCheckoutFunc "$@";;
|
||||
"tricorder" ) tricorderFunc;;
|
||||
"updatechecker" ) updateCheckFunc "$@";;
|
||||
"arpflush" ) arpFunc "$@";;
|
||||
* ) helpFunc;;
|
||||
esac
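A few usage sketches for the options wired up in the dispatch above; the domain is a placeholder and the commands simply restate the help text rather than adding behaviour:

```bash
pihole --white-wild example.com    # wildcard-whitelist a domain and its subdomains
pihole restartdns reload-lists     # update the lists without flushing the DNS cache
pihole arpflush                    # flush Pi-hole's network tables
```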
|
||||
|
@@ -1,5 +1,6 @@
|
||||
docker-compose
|
||||
pytest
|
||||
pytest-xdist
|
||||
pytest-cov
|
||||
testinfra
|
||||
docker-compose==1.23.2
|
||||
pytest==4.3.0
|
||||
pytest-xdist==1.26.1
|
||||
pytest-cov==2.6.1
|
||||
testinfra==1.19.0
|
||||
tox==3.7.0
|
||||
|
6
setup.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from setuptools import setup
|
||||
|
||||
setup(
|
||||
setup_requires=['pytest-runner'],
|
||||
tests_require=['pytest'],
|
||||
)
|
25
test/README.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# Recommended way to run tests
|
||||
|
||||
Make sure you have Docker and Python w/pip package manager.
|
||||
|
||||
From command line all you need to do is:
|
||||
|
||||
- `pip install tox`
|
||||
- `tox`
|
||||
|
||||
Tox handles setting up a virtual environment for python dependencies, installing dependencies, building the docker images used by tests, and finally running tests. It's an easy way to have travis-ci like build behavior locally.
|
||||
|
||||
## Alternative py.test method of running tests
|
||||
|
||||
You're responsible for setting up your virtual env and dependencies in this situation.
|
||||
|
||||
```
|
||||
py.test -vv -n auto -m "build_stage"
|
||||
py.test -vv -n auto -m "not build_stage"
|
||||
```
|
||||
|
||||
The build_stage tests have to run first to create the docker images, followed by the actual tests which utilize said images. Unless you're changing your dockerfiles you shouldn't have to run the build_stage every time - but it's a good idea to rebuild at least once a day in case the base Docker images or packages change.
|
||||
|
||||
# How do I debug python?
|
||||
|
||||
Highly recommended: Setup PyCharm on a **Docker enabled** machine. Having a python debugger like PyCharm changes your life if you've never used it :)
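For the alternative py.test route above, one possible manual setup is sketched below; the virtualenv name and the unpinned installs are illustrative rather than taken from the repo:

```bash
virtualenv .venv && . .venv/bin/activate
pip install pytest pytest-xdist pytest-cov testinfra docker-compose
py.test -vv -n auto -m "build_stage"        # build the test images first
py.test -vv -n auto -m "not build_stage"    # then run the remaining tests
```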
|
113
test/conftest.py
@@ -1,14 +1,30 @@
|
||||
import pytest
|
||||
import testinfra
|
||||
from textwrap import dedent
|
||||
|
||||
check_output = testinfra.get_backend(
|
||||
"local://"
|
||||
).get_module("Command").check_output
|
||||
|
||||
SETUPVARS = {
|
||||
'PIHOLE_INTERFACE': 'eth99',
|
||||
'IPV4_ADDRESS': '1.1.1.1',
|
||||
'IPV6_ADDRESS': 'FE80::240:D0FF:FE48:4672',
|
||||
'PIHOLE_DNS_1': '4.2.2.1',
|
||||
'PIHOLE_DNS_2': '4.2.2.2'
|
||||
}
|
||||
|
||||
tick_box = "[\x1b[1;32m\xe2\x9c\x93\x1b[0m]".decode("utf-8")
|
||||
cross_box = "[\x1b[1;31m\xe2\x9c\x97\x1b[0m]".decode("utf-8")
|
||||
info_box = "[i]".decode("utf-8")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def Pihole(Docker):
|
||||
''' used to contain some script stubbing, now pretty much an alias.
|
||||
Also provides bash as the default run function shell '''
|
||||
'''
|
||||
used to contain some script stubbing, now pretty much an alias.
|
||||
Also provides bash as the default run function shell
|
||||
'''
|
||||
def run_bash(self, command, *args, **kwargs):
|
||||
cmd = self.get_command(command, *args)
|
||||
if self.user is not None:
|
||||
@@ -22,12 +38,18 @@ def Pihole(Docker):
|
||||
return out
|
||||
|
||||
funcType = type(Docker.run)
|
||||
Docker.run = funcType(run_bash, Docker, testinfra.backend.docker.DockerBackend)
|
||||
Docker.run = funcType(run_bash,
|
||||
Docker,
|
||||
testinfra.backend.docker.DockerBackend)
|
||||
return Docker
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def Docker(request, args, image, cmd):
|
||||
''' combine our fixtures into a docker run command and setup finalizer to cleanup '''
|
||||
'''
|
||||
combine our fixtures into a docker run command and setup finalizer to
|
||||
cleanup
|
||||
'''
|
||||
assert 'docker' in check_output('id'), "Are you in the docker group?"
|
||||
docker_run = "docker run {} {} {}".format(args, image, cmd)
|
||||
docker_id = check_output(docker_run)
|
||||
@@ -40,22 +62,95 @@ def Docker(request, args, image, cmd):
|
||||
docker_container.id = docker_id
|
||||
return docker_container
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def args(request):
|
||||
''' -t became required when tput began being used '''
|
||||
'''
|
||||
-t became required when tput began being used
|
||||
'''
|
||||
return '-t -d'
|
||||
|
||||
@pytest.fixture(params=['debian', 'centos'])
|
||||
|
||||
@pytest.fixture(params=['debian', 'centos', 'fedora'])
|
||||
def tag(request):
|
||||
''' consumed by image to make the test matrix '''
|
||||
'''
|
||||
consumed by image to make the test matrix
|
||||
'''
|
||||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def image(request, tag):
|
||||
''' built by test_000_build_containers.py '''
|
||||
'''
|
||||
built by test_000_build_containers.py
|
||||
'''
|
||||
return 'pytest_pihole:{}'.format(tag)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def cmd(request):
|
||||
''' default to doing nothing by tailing null, but don't exit '''
|
||||
'''
|
||||
default to doing nothing by tailing null, but don't exit
|
||||
'''
|
||||
return 'tail -f /dev/null'
|
||||
|
||||
|
||||
# Helper functions
|
||||
def mock_command(script, args, container):
|
||||
'''
|
||||
Allows for setup of commands we don't really want to have to run for real
|
||||
in unit tests
|
||||
'''
|
||||
full_script_path = '/usr/local/bin/{}'.format(script)
|
||||
mock_script = dedent('''\
|
||||
#!/bin/bash -e
|
||||
echo "\$0 \$@" >> /var/log/{script}
|
||||
case "\$1" in'''.format(script=script))
|
||||
for k, v in args.iteritems():
|
||||
case = dedent('''
|
||||
{arg})
|
||||
echo {res}
|
||||
exit {retcode}
|
||||
;;'''.format(arg=k, res=v[0], retcode=v[1]))
|
||||
mock_script += case
|
||||
mock_script += dedent('''
|
||||
esac''')
|
||||
container.run('''
|
||||
cat <<EOF> {script}\n{content}\nEOF
|
||||
chmod +x {script}
|
||||
rm -f /var/log/{scriptlog}'''.format(script=full_script_path,
|
||||
content=mock_script,
|
||||
scriptlog=script))
|
||||
|
||||
|
||||
def mock_command_2(script, args, container):
|
||||
'''
|
||||
Allows for setup of commands we don't really want to have to run for real
|
||||
in unit tests
|
||||
'''
|
||||
full_script_path = '/usr/local/bin/{}'.format(script)
|
||||
mock_script = dedent('''\
|
||||
#!/bin/bash -e
|
||||
echo "\$0 \$@" >> /var/log/{script}
|
||||
case "\$1 \$2" in'''.format(script=script))
|
||||
for k, v in args.iteritems():
|
||||
case = dedent('''
|
||||
\"{arg}\")
|
||||
echo \"{res}\"
|
||||
exit {retcode}
|
||||
;;'''.format(arg=k, res=v[0], retcode=v[1]))
|
||||
mock_script += case
|
||||
mock_script += dedent('''
|
||||
esac''')
|
||||
container.run('''
|
||||
cat <<EOF> {script}\n{content}\nEOF
|
||||
chmod +x {script}
|
||||
rm -f /var/log/{scriptlog}'''.format(script=full_script_path,
|
||||
content=mock_script,
|
||||
scriptlog=script))
|
||||
|
||||
|
||||
def run_script(Pihole, script):
|
||||
result = Pihole.run(script)
|
||||
assert result.rc == 0
|
||||
return result
|
||||
|
16
test/fedora.Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
FROM fedora:30
|
||||
|
||||
ENV GITDIR /etc/.pihole
|
||||
ENV SCRIPTDIR /opt/pihole
|
||||
|
||||
RUN mkdir -p $GITDIR $SCRIPTDIR /etc/pihole
|
||||
ADD . $GITDIR
|
||||
RUN cp $GITDIR/advanced/Scripts/*.sh $GITDIR/gravity.sh $GITDIR/pihole $GITDIR/automated\ install/*.sh $SCRIPTDIR/
|
||||
ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:$SCRIPTDIR
|
||||
|
||||
RUN true && \
|
||||
chmod +x $SCRIPTDIR/*
|
||||
|
||||
ENV PH_TEST true
|
||||
|
||||
#sed '/# Start the installer/Q' /opt/pihole/basic-install.sh > /opt/pihole/stub_basic-install.sh && \
|
@@ -6,10 +6,15 @@ run_local = testinfra.get_backend(
|
||||
"local://"
|
||||
).get_module("Command").run
|
||||
|
||||
|
||||
@pytest.mark.parametrize("image,tag", [
|
||||
( 'test/debian.Dockerfile', 'pytest_pihole:debian' ),
|
||||
( 'test/centos.Dockerfile', 'pytest_pihole:centos' ),
|
||||
('test/debian.Dockerfile', 'pytest_pihole:debian'),
|
||||
('test/centos.Dockerfile', 'pytest_pihole:centos'),
|
||||
('test/fedora.Dockerfile', 'pytest_pihole:fedora'),
|
||||
])
|
||||
# mark as 'build_stage' so we can ensure images are built first when tests
|
||||
# are executed in parallel. (not required when tests are executed serially)
|
||||
@pytest.mark.build_stage
|
||||
def test_build_pihole_image(image, tag):
|
||||
build_cmd = run_local('docker build -f {} -t {} .'.format(image, tag))
|
||||
if build_cmd.rc != 0:
|
||||
|
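For the new Fedora entry in the matrix, the build step above amounts to roughly this manual command:

```bash
docker build -f test/fedora.Dockerfile -t pytest_pihole:fedora .
```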
@@ -1,24 +1,40 @@
|
||||
import pytest
|
||||
from textwrap import dedent
|
||||
import re
|
||||
from conftest import (
|
||||
SETUPVARS,
|
||||
tick_box,
|
||||
info_box,
|
||||
cross_box,
|
||||
mock_command,
|
||||
mock_command_2,
|
||||
run_script
|
||||
)
|
||||
|
||||
SETUPVARS = {
|
||||
'PIHOLE_INTERFACE' : 'eth99',
|
||||
'IPV4_ADDRESS' : '1.1.1.1',
|
||||
'IPV6_ADDRESS' : 'FE80::240:D0FF:FE48:4672',
|
||||
'PIHOLE_DNS_1' : '4.2.2.1',
|
||||
'PIHOLE_DNS_2' : '4.2.2.2'
|
||||
}
|
||||
|
||||
tick_box="[\x1b[1;32m\xe2\x9c\x93\x1b[0m]".decode("utf-8")
|
||||
cross_box="[\x1b[1;31m\xe2\x9c\x97\x1b[0m]".decode("utf-8")
|
||||
info_box="[i]".decode("utf-8")
|
||||
def test_supported_operating_system(Pihole):
|
||||
'''
|
||||
confirm installer exists on unsupported distribution
|
||||
'''
|
||||
# break supported package managers to emulate an unsupported distribution
|
||||
Pihole.run('rm -rf /usr/bin/apt-get')
|
||||
Pihole.run('rm -rf /usr/bin/rpm')
|
||||
distro_check = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
distro_check
|
||||
''')
|
||||
expected_stdout = cross_box + ' OS distribution not supported'
|
||||
assert expected_stdout in distro_check.stdout
|
||||
# assert distro_check.rc == 1
|
||||
|
||||
|
||||
def test_setupVars_are_sourced_to_global_scope(Pihole):
|
||||
''' currently update_dialogs sources setupVars with a dot,
|
||||
'''
|
||||
currently update_dialogs sources setupVars with a dot,
|
||||
then various other functions use the variables.
|
||||
This confirms the sourced variables are in scope between functions '''
|
||||
This confirms the sourced variables are in scope between functions
|
||||
'''
|
||||
setup_var_file = 'cat <<EOF> /etc/pihole/setupVars.conf\n'
|
||||
for k,v in SETUPVARS.iteritems():
|
||||
for k, v in SETUPVARS.iteritems():
|
||||
setup_var_file += "{}={}\n".format(k, v)
|
||||
setup_var_file += "EOF\n"
|
||||
Pihole.run(setup_var_file)
|
||||
@@ -43,13 +59,17 @@ def test_setupVars_are_sourced_to_global_scope(Pihole):
|
||||
|
||||
output = run_script(Pihole, script).stdout
|
||||
|
||||
for k,v in SETUPVARS.iteritems():
|
||||
for k, v in SETUPVARS.iteritems():
|
||||
assert "{}={}".format(k, v) in output
|
||||
|
||||
|
||||
def test_setupVars_saved_to_file(Pihole):
|
||||
''' confirm saved settings are written to a file for future updates to re-use '''
|
||||
set_setup_vars = '\n' # dedent works better with this and padding matching script below
|
||||
for k,v in SETUPVARS.iteritems():
|
||||
'''
|
||||
confirm saved settings are written to a file for future updates to re-use
|
||||
'''
|
||||
# dedent works better with this and padding matching script below
|
||||
set_setup_vars = '\n'
|
||||
for k, v in SETUPVARS.iteritems():
|
||||
set_setup_vars += " {}={}\n".format(k, v)
|
||||
Pihole.run(set_setup_vars).stdout
|
||||
|
||||
@@ -61,163 +81,109 @@ def test_setupVars_saved_to_file(Pihole):
|
||||
{}
|
||||
mkdir -p /etc/dnsmasq.d
|
||||
version_check_dnsmasq
|
||||
echo "" > /etc/pihole/pihole-FTL.conf
|
||||
finalExports
|
||||
cat /etc/pihole/setupVars.conf
|
||||
'''.format(set_setup_vars))
|
||||
|
||||
output = run_script(Pihole, script).stdout
|
||||
|
||||
for k,v in SETUPVARS.iteritems():
|
||||
for k, v in SETUPVARS.iteritems():
|
||||
assert "{}={}".format(k, v) in output
|
||||
|
||||
def test_configureFirewall_firewalld_running_no_errors(Pihole):
|
||||
''' confirms firewalld rules are applied when firewallD is running '''
|
||||
# firewallD returns 'running' as status
|
||||
mock_command('firewall-cmd', {'*':('running', 0)}, Pihole)
|
||||
# Whiptail dialog returns Ok for user prompt
|
||||
mock_command('whiptail', {'*':('', 0)}, Pihole)
|
||||
configureFirewall = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
''')
|
||||
expected_stdout = 'Configuring FirewallD for httpd and pihole-FTL'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
firewall_calls = Pihole.run('cat /var/log/firewall-cmd').stdout
|
||||
assert 'firewall-cmd --state' in firewall_calls
|
||||
assert 'firewall-cmd --permanent --add-service=http --add-service=dns' in firewall_calls
|
||||
assert 'firewall-cmd --reload' in firewall_calls
|
||||
|
||||
def test_configureFirewall_firewalld_disabled_no_errors(Pihole):
|
||||
''' confirms firewalld rules are not applied when firewallD is not running '''
|
||||
# firewallD returns non-running status
|
||||
mock_command('firewall-cmd', {'*':('not running', '1')}, Pihole)
|
||||
configureFirewall = Pihole.run('''
|
||||
def test_selinux_not_detected(Pihole):
|
||||
'''
|
||||
confirms installer continues when SELinux configuration file does not exist
|
||||
'''
|
||||
check_selinux = Pihole.run('''
|
||||
rm -f /etc/selinux/config
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
checkSelinux
|
||||
''')
|
||||
expected_stdout = 'No active firewall detected.. skipping firewall configuration'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
expected_stdout = info_box + ' SELinux not detected'
|
||||
assert expected_stdout in check_selinux.stdout
|
||||
assert check_selinux.rc == 0
|
||||
|
||||
def test_configureFirewall_firewalld_enabled_declined_no_errors(Pihole):
|
||||
''' confirms firewalld rules are not applied when firewallD is running, user declines ruleset '''
|
||||
# firewallD returns running status
|
||||
mock_command('firewall-cmd', {'*':('running', 0)}, Pihole)
|
||||
# Whiptail dialog returns Cancel for user prompt
|
||||
mock_command('whiptail', {'*':('', 1)}, Pihole)
|
||||
configureFirewall = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
''')
|
||||
expected_stdout = 'Not installing firewall rulesets.'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
|
||||
def test_configureFirewall_no_firewall(Pihole):
|
||||
''' confirms firewall skipped no daemon is running '''
|
||||
configureFirewall = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
''')
|
||||
expected_stdout = 'No active firewall detected'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
|
||||
def test_configureFirewall_IPTables_enabled_declined_no_errors(Pihole):
|
||||
''' confirms IPTables rules are not applied when IPTables is running, user declines ruleset '''
|
||||
# iptables command exists
|
||||
mock_command('iptables', {'*':('', '0')}, Pihole)
|
||||
# modinfo returns always true (ip_tables module check)
|
||||
mock_command('modinfo', {'*':('', '0')}, Pihole)
|
||||
# Whiptail dialog returns Cancel for user prompt
|
||||
mock_command('whiptail', {'*':('', '1')}, Pihole)
|
||||
configureFirewall = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
''')
|
||||
expected_stdout = 'Not installing firewall rulesets.'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
|
||||
def test_configureFirewall_IPTables_enabled_rules_exist_no_errors(Pihole):
|
||||
''' confirms IPTables rules are not applied when IPTables is running and rules exist '''
|
||||
# iptables command exists and returns 0 on calls (should return 0 on iptables -C)
|
||||
mock_command('iptables', {'-S':('-P INPUT DENY', '0')}, Pihole)
|
||||
# modinfo returns always true (ip_tables module check)
|
||||
mock_command('modinfo', {'*':('', '0')}, Pihole)
|
||||
# Whiptail dialog returns Cancel for user prompt
|
||||
mock_command('whiptail', {'*':('', '0')}, Pihole)
|
||||
configureFirewall = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
''')
|
||||
expected_stdout = 'Installing new IPTables firewall rulesets'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
firewall_calls = Pihole.run('cat /var/log/iptables').stdout
|
||||
assert 'iptables -I INPUT 1 -p tcp -m tcp --dport 80 -j ACCEPT' not in firewall_calls
|
||||
assert 'iptables -I INPUT 1 -p tcp -m tcp --dport 53 -j ACCEPT' not in firewall_calls
|
||||
assert 'iptables -I INPUT 1 -p udp -m udp --dport 53 -j ACCEPT' not in firewall_calls
|
||||
|
||||
def test_configureFirewall_IPTables_enabled_not_exist_no_errors(Pihole):
|
||||
''' confirms IPTables rules are applied when IPTables is running and rules do not exist '''
|
||||
# iptables command and returns 0 on calls (should return 1 on iptables -C)
|
||||
mock_command('iptables', {'-S':('-P INPUT DENY', '0'), '-C':('', 1), '-I':('', 0)}, Pihole)
|
||||
# modinfo returns always true (ip_tables module check)
|
||||
mock_command('modinfo', {'*':('', '0')}, Pihole)
|
||||
# Whiptail dialog returns Cancel for user prompt
|
||||
mock_command('whiptail', {'*':('', '0')}, Pihole)
|
||||
configureFirewall = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
configureFirewall
|
||||
''')
|
||||
expected_stdout = 'Installing new IPTables firewall rulesets'
|
||||
assert expected_stdout in configureFirewall.stdout
|
||||
firewall_calls = Pihole.run('cat /var/log/iptables').stdout
|
||||
assert 'iptables -I INPUT 1 -p tcp -m tcp --dport 80 -j ACCEPT' in firewall_calls
|
||||
assert 'iptables -I INPUT 1 -p tcp -m tcp --dport 53 -j ACCEPT' in firewall_calls
|
||||
assert 'iptables -I INPUT 1 -p udp -m udp --dport 53 -j ACCEPT' in firewall_calls
|
||||
|
||||
def test_installPiholeWeb_fresh_install_no_errors(Pihole):
|
||||
''' confirms all web page assets from Core repo are installed on a fresh build '''
|
||||
'''
|
||||
confirms all web page assets from Core repo are installed on a fresh build
|
||||
'''
|
||||
installWeb = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
installPiholeWeb
|
||||
''')
|
||||
assert info_box + ' Installing blocking page...' in installWeb.stdout
|
||||
assert tick_box + ' Creating directory for blocking page, and copying files' in installWeb.stdout
|
||||
assert cross_box + ' Backing up index.lighttpd.html' in installWeb.stdout
|
||||
assert 'No default index.lighttpd.html file found... not backing up' in installWeb.stdout
|
||||
assert tick_box + ' Installing sudoer file' in installWeb.stdout
|
||||
expected_stdout = info_box + ' Installing blocking page...'
|
||||
assert expected_stdout in installWeb.stdout
|
||||
expected_stdout = tick_box + (' Creating directory for blocking page, '
|
||||
'and copying files')
|
||||
assert expected_stdout in installWeb.stdout
|
||||
expected_stdout = info_box + ' Backing up index.lighttpd.html'
|
||||
assert expected_stdout in installWeb.stdout
|
||||
expected_stdout = ('No default index.lighttpd.html file found... '
|
||||
'not backing up')
|
||||
assert expected_stdout in installWeb.stdout
|
||||
expected_stdout = tick_box + ' Installing sudoer file'
|
||||
assert expected_stdout in installWeb.stdout
|
||||
web_directory = Pihole.run('ls -r /var/www/html/pihole').stdout
|
||||
assert 'index.php' in web_directory
|
||||
assert 'blockingpage.css' in web_directory
|
||||
|
||||
|
||||
def test_update_package_cache_success_no_errors(Pihole):
|
||||
''' confirms package cache was updated without any errors'''
|
||||
'''
|
||||
confirms package cache was updated without any errors
|
||||
'''
|
||||
updateCache = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
distro_check
|
||||
update_package_cache
|
||||
''')
|
||||
assert tick_box + ' Update local cache of available packages' in updateCache.stdout
|
||||
assert 'Error: Unable to update package cache.' not in updateCache.stdout
|
||||
expected_stdout = tick_box + ' Update local cache of available packages'
|
||||
assert expected_stdout in updateCache.stdout
|
||||
assert 'error' not in updateCache.stdout.lower()
|
||||
|
||||
|
||||
def test_update_package_cache_failure_no_errors(Pihole):
|
||||
''' confirms package cache was not updated'''
|
||||
mock_command('apt-get', {'update':('', '1')}, Pihole)
|
||||
'''
|
||||
confirms package cache was not updated
|
||||
'''
|
||||
mock_command('apt-get', {'update': ('', '1')}, Pihole)
|
||||
updateCache = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
distro_check
|
||||
update_package_cache
|
||||
''')
|
||||
assert cross_box + ' Update local cache of available packages' in updateCache.stdout
|
||||
expected_stdout = cross_box + ' Update local cache of available packages'
|
||||
assert expected_stdout in updateCache.stdout
|
||||
assert 'Error: Unable to update package cache.' in updateCache.stdout
|
||||
|
||||
|
||||
def test_FTL_detect_aarch64_no_errors(Pihole):
|
||||
''' confirms only aarch64 package is downloaded for FTL engine '''
|
||||
'''
|
||||
confirms only aarch64 package is downloaded for FTL engine
|
||||
'''
|
||||
# mock uname to return aarch64 platform
|
||||
mock_command('uname', {'-m':('aarch64', '0')}, Pihole)
|
||||
mock_command('uname', {'-m': ('aarch64', '0')}, Pihole)
|
||||
# mock ldd to respond with aarch64 shared library
|
||||
mock_command('ldd', {'/bin/ls':('/lib/ld-linux-aarch64.so.1', '0')}, Pihole)
|
||||
mock_command(
|
||||
'ldd',
|
||||
{
|
||||
'/bin/ls': (
|
||||
'/lib/ld-linux-aarch64.so.1',
|
||||
'0'
|
||||
)
|
||||
},
|
||||
Pihole
|
||||
)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLdetect
|
||||
create_pihole_user
|
||||
funcOutput=$(get_binary_name)
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}"
|
||||
theRest="${funcOutput%pihole-FTL*}"
|
||||
FTLdetect "${binary}" "${theRest}"
|
||||
''')
|
||||
expected_stdout = info_box + ' FTL Checks...'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
@@ -226,32 +192,47 @@ def test_FTL_detect_aarch64_no_errors(Pihole):
|
||||
expected_stdout = tick_box + ' Downloading and Installing FTL'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_FTL_detect_armv6l_no_errors(Pihole):
|
||||
''' confirms only armv6l package is downloaded for FTL engine '''
|
||||
'''
|
||||
confirms only armv6l package is downloaded for FTL engine
|
||||
'''
|
||||
# mock uname to return armv6l platform
|
||||
mock_command('uname', {'-m':('armv6l', '0')}, Pihole)
|
||||
mock_command('uname', {'-m': ('armv6l', '0')}, Pihole)
|
||||
# mock ldd to respond with aarch64 shared library
|
||||
mock_command('ldd', {'/bin/ls':('/lib/ld-linux-armhf.so.3', '0')}, Pihole)
|
||||
mock_command('ldd', {'/bin/ls': ('/lib/ld-linux-armhf.so.3', '0')}, Pihole)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLdetect
|
||||
create_pihole_user
|
||||
funcOutput=$(get_binary_name)
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}"
|
||||
theRest="${funcOutput%pihole-FTL*}"
|
||||
FTLdetect "${binary}" "${theRest}"
|
||||
''')
|
||||
expected_stdout = info_box + ' FTL Checks...'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
expected_stdout = tick_box + ' Detected ARM-hf architecture (armv6 or lower)'
|
||||
expected_stdout = tick_box + (' Detected ARM-hf architecture '
|
||||
'(armv6 or lower)')
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
expected_stdout = tick_box + ' Downloading and Installing FTL'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_FTL_detect_armv7l_no_errors(Pihole):
|
||||
''' confirms only armv7l package is downloaded for FTL engine '''
|
||||
'''
|
||||
confirms only armv7l package is downloaded for FTL engine
|
||||
'''
|
||||
# mock uname to return armv7l platform
|
||||
mock_command('uname', {'-m':('armv7l', '0')}, Pihole)
|
||||
mock_command('uname', {'-m': ('armv7l', '0')}, Pihole)
|
||||
# mock ldd to respond with aarch64 shared library
|
||||
mock_command('ldd', {'/bin/ls':('/lib/ld-linux-armhf.so.3', '0')}, Pihole)
|
||||
mock_command('ldd', {'/bin/ls': ('/lib/ld-linux-armhf.so.3', '0')}, Pihole)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLdetect
|
||||
create_pihole_user
|
||||
funcOutput=$(get_binary_name)
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}"
|
||||
theRest="${funcOutput%pihole-FTL*}"
|
||||
FTLdetect "${binary}" "${theRest}"
|
||||
''')
|
||||
expected_stdout = info_box + ' FTL Checks...'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
@@ -260,11 +241,18 @@ def test_FTL_detect_armv7l_no_errors(Pihole):
|
||||
expected_stdout = tick_box + ' Downloading and Installing FTL'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_FTL_detect_x86_64_no_errors(Pihole):
|
||||
''' confirms only x86_64 package is downloaded for FTL engine '''
|
||||
'''
|
||||
confirms only x86_64 package is downloaded for FTL engine
|
||||
'''
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLdetect
|
||||
create_pihole_user
|
||||
funcOutput=$(get_binary_name)
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}"
|
||||
theRest="${funcOutput%pihole-FTL*}"
|
||||
FTLdetect "${binary}" "${theRest}"
|
||||
''')
|
||||
expected_stdout = info_box + ' FTL Checks...'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
@@ -273,55 +261,65 @@ def test_FTL_detect_x86_64_no_errors(Pihole):
|
||||
expected_stdout = tick_box + ' Downloading and Installing FTL'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_FTL_detect_unknown_no_errors(Pihole):
|
||||
''' confirms only generic package is downloaded for FTL engine '''
|
||||
# mock uname to return generic platform
|
||||
mock_command('uname', {'-m':('mips', '0')}, Pihole)
|
||||
mock_command('uname', {'-m': ('mips', '0')}, Pihole)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLdetect
|
||||
create_pihole_user
|
||||
funcOutput=$(get_binary_name)
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}"
|
||||
theRest="${funcOutput%pihole-FTL*}"
|
||||
FTLdetect "${binary}" "${theRest}"
|
||||
''')
|
||||
expected_stdout = 'Not able to detect architecture (unknown: mips)'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_FTL_download_aarch64_no_errors(Pihole):
|
||||
''' confirms only aarch64 package is downloaded for FTL engine '''
|
||||
# mock uname to return generic platform
|
||||
'''
|
||||
confirms only aarch64 package is downloaded for FTL engine
|
||||
'''
|
||||
# mock whiptail answers and ensure installer dependencies
|
||||
mock_command('whiptail', {'*': ('', '0')}, Pihole)
|
||||
Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
distro_check
|
||||
install_dependent_packages ${INSTALLER_DEPS[@]}
|
||||
''')
|
||||
download_binary = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLinstall pihole-FTL-aarch64-linux-gnu
|
||||
create_pihole_user
|
||||
FTLinstall "pihole-FTL-aarch64-linux-gnu"
|
||||
''')
|
||||
expected_stdout = tick_box + ' Downloading and Installing FTL'
|
||||
assert expected_stdout in download_binary.stdout
|
||||
error = 'Error: Download of binary from Github failed'
|
||||
assert error not in download_binary.stdout
|
||||
error = 'Error: URL not found'
|
||||
assert error not in download_binary.stdout
|
||||
assert 'error' not in download_binary.stdout.lower()
|
||||
|
||||
def test_FTL_download_unknown_fails_no_errors(Pihole):
|
||||
''' confirms unknown binary is not downloaded for FTL engine '''
|
||||
# mock uname to return generic platform
|
||||
download_binary = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLinstall pihole-FTL-mips
|
||||
''')
|
||||
expected_stdout = cross_box + ' Downloading and Installing FTL'
|
||||
assert expected_stdout in download_binary.stdout
|
||||
error = 'Error: URL not found'
|
||||
assert error in download_binary.stdout
|
||||
|
||||
def test_FTL_binary_installed_and_responsive_no_errors(Pihole):
|
||||
''' confirms FTL binary is copied and functional in installed location '''
|
||||
'''
|
||||
confirms FTL binary is copied and functional in installed location
|
||||
'''
|
||||
installed_binary = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
FTLdetect
|
||||
create_pihole_user
|
||||
funcOutput=$(get_binary_name)
|
||||
binary="pihole-FTL${funcOutput##*pihole-FTL}"
|
||||
theRest="${funcOutput%pihole-FTL*}"
|
||||
FTLdetect "${binary}" "${theRest}"
|
||||
pihole-FTL version
|
||||
''')
|
||||
expected_stdout = 'v'
|
||||
assert expected_stdout in installed_binary.stdout
|
||||
|
||||
|
||||
# def test_FTL_support_files_installed(Pihole):
|
||||
# ''' confirms FTL support files are installed '''
|
||||
# '''
|
||||
# confirms FTL support files are installed
|
||||
# '''
|
||||
# support_files = Pihole.run('''
|
||||
# source /opt/pihole/basic-install.sh
|
||||
# FTLdetect
|
||||
@@ -334,21 +332,46 @@ def test_FTL_binary_installed_and_responsive_no_errors(Pihole):
|
||||
# assert '644 /run/pihole-FTL.pid' in support_files.stdout
|
||||
# assert '644 /var/log/pihole-FTL.log' in support_files.stdout
|
||||
|
||||
|
||||
def test_IPv6_only_link_local(Pihole):
|
||||
''' confirms IPv6 blocking is disabled for Link-local address '''
|
||||
'''
|
||||
confirms IPv6 blocking is disabled for Link-local address
|
||||
'''
|
||||
# mock ip -6 address to return Link-local address
|
||||
mock_command_2('ip', {'-6 address':('inet6 fe80::d210:52fa:fe00:7ad7/64 scope link', '0')}, Pihole)
|
||||
mock_command_2(
|
||||
'ip',
|
||||
{
|
||||
'-6 address': (
|
||||
'inet6 fe80::d210:52fa:fe00:7ad7/64 scope link',
|
||||
'0'
|
||||
)
|
||||
},
|
||||
Pihole
|
||||
)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
useIPv6dialog
|
||||
''')
|
||||
expected_stdout = 'Unable to find IPv6 ULA/GUA address, IPv6 adblocking will not be enabled'
|
||||
expected_stdout = ('Unable to find IPv6 ULA/GUA address, '
|
||||
'IPv6 adblocking will not be enabled')
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_IPv6_only_ULA(Pihole):
|
||||
''' confirms IPv6 blocking is enabled for ULA addresses '''
|
||||
'''
|
||||
confirms IPv6 blocking is enabled for ULA addresses
|
||||
'''
|
||||
# mock ip -6 address to return ULA address
|
||||
mock_command_2('ip', {'-6 address':('inet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global', '0')}, Pihole)
|
||||
mock_command_2(
|
||||
'ip',
|
||||
{
|
||||
'-6 address': (
|
||||
'inet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global',
|
||||
'0'
|
||||
)
|
||||
},
|
||||
Pihole
|
||||
)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
useIPv6dialog
|
||||
@@ -356,10 +379,22 @@ def test_IPv6_only_ULA(Pihole):
|
||||
expected_stdout = 'Found IPv6 ULA address, using it for blocking IPv6 ads'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_IPv6_only_GUA(Pihole):
|
||||
''' confirms IPv6 blocking is enabled for GUA addresses '''
|
||||
'''
|
||||
confirms IPv6 blocking is enabled for GUA addresses
|
||||
'''
|
||||
# mock ip -6 address to return GUA address
|
||||
mock_command_2('ip', {'-6 address':('inet6 2003:12:1e43:301:d210:52fa:fe00:7ad7/64 scope global', '0')}, Pihole)
|
||||
mock_command_2(
|
||||
'ip',
|
||||
{
|
||||
'-6 address': (
|
||||
'inet6 2003:12:1e43:301:d210:52fa:fe00:7ad7/64 scope global',
|
||||
'0'
|
||||
)
|
||||
},
|
||||
Pihole
|
||||
)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
useIPv6dialog
|
||||
@@ -367,10 +402,23 @@ def test_IPv6_only_GUA(Pihole):
|
||||
expected_stdout = 'Found IPv6 GUA address, using it for blocking IPv6 ads'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_IPv6_GUA_ULA_test(Pihole):
|
||||
''' confirms IPv6 blocking is enabled for GUA and ULA addresses '''
|
||||
'''
|
||||
confirms IPv6 blocking is enabled for GUA and ULA addresses
|
||||
'''
|
||||
# mock ip -6 address to return GUA and ULA addresses
|
||||
mock_command_2('ip', {'-6 address':('inet6 2003:12:1e43:301:d210:52fa:fe00:7ad7/64 scope global\ninet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global', '0')}, Pihole)
|
||||
mock_command_2(
|
||||
'ip',
|
||||
{
|
||||
'-6 address': (
|
||||
'inet6 2003:12:1e43:301:d210:52fa:fe00:7ad7/64 scope global\n'
|
||||
'inet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global',
|
||||
'0'
|
||||
)
|
||||
},
|
||||
Pihole
|
||||
)
|
||||
detectPlatform = Pihole.run('''
|
||||
source /opt/pihole/basic-install.sh
|
||||
useIPv6dialog
|
||||
@@ -378,10 +426,23 @@ def test_IPv6_GUA_ULA_test(Pihole):
|
||||
expected_stdout = 'Found IPv6 ULA address, using it for blocking IPv6 ads'
|
||||
assert expected_stdout in detectPlatform.stdout
|
||||
|
||||
|
||||
def test_IPv6_ULA_GUA_test(Pihole):
    ''' confirms IPv6 blocking is enabled for GUA and ULA addresses '''
    '''
    confirms IPv6 blocking is enabled for GUA and ULA addresses
    '''
    # mock ip -6 address to return ULA and GUA addresses
    mock_command_2('ip', {'-6 address':('inet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global\ninet6 2003:12:1e43:301:d210:52fa:fe00:7ad7/64 scope global', '0')}, Pihole)
    mock_command_2(
        'ip',
        {
            '-6 address': (
                'inet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global\n'
                'inet6 2003:12:1e43:301:d210:52fa:fe00:7ad7/64 scope global',
                '0'
            )
        },
        Pihole
    )
    detectPlatform = Pihole.run('''
    source /opt/pihole/basic-install.sh
    useIPv6dialog
@@ -389,50 +450,41 @@ def test_IPv6_ULA_GUA_test(Pihole):
    expected_stdout = 'Found IPv6 ULA address, using it for blocking IPv6 ads'
    assert expected_stdout in detectPlatform.stdout

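The four tests above pin down how useIPv6dialog is expected to treat the global-scope addresses reported by `ip -6 address`: a ULA (fd00::/8) is preferred over a GUA (2000::/3) whenever both are present, typically because a ULA does not change when the delegated prefix changes. A minimal bash sketch of that selection logic, written here only to illustrate what the assertions require (the variable names are made up; this is not the code in basic-install.sh):

# Hypothetical sketch only. It mirrors what the tests assert, not the installer itself.
# Classify the global-scope addresses from `ip -6 address` and prefer ULA over GUA.
IPV6_ULA=""
IPV6_GUA=""
for addr in $(ip -6 address | awk '/scope global/ {print $2}' | cut -d'/' -f1); do
    case "${addr}" in
        fd*)   IPV6_ULA="${addr}" ;;   # unique local address (fd00::/8)
        2*|3*) IPV6_GUA="${addr}" ;;   # global unicast address (2000::/3)
    esac
done
if [[ -n "${IPV6_ULA}" ]]; then
    echo "Found IPv6 ULA address, using it for blocking IPv6 ads"
elif [[ -n "${IPV6_GUA}" ]]; then
    echo "Found IPv6 GUA address, using it for blocking IPv6 ads"
fi
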
# Helper functions
def mock_command(script, args, container):
    ''' Allows for setup of commands we don't really want to have to run for real in unit tests '''
    full_script_path = '/usr/local/bin/{}'.format(script)
    mock_script = dedent('''\
    #!/bin/bash -e
    echo "\$0 \$@" >> /var/log/{script}
    case "\$1" in'''.format(script=script))
    for k, v in args.iteritems():
        case = dedent('''
        {arg})
        echo {res}
        exit {retcode}
        ;;'''.format(arg=k, res=v[0], retcode=v[1]))
        mock_script += case
    mock_script += dedent('''
    esac''')
    container.run('''
    cat <<EOF> {script}\n{content}\nEOF
    chmod +x {script}
    rm -f /var/log/{scriptlog}'''.format(script=full_script_path, content=mock_script, scriptlog=script))

def mock_command_2(script, args, container):
    ''' Allows for setup of commands we don't really want to have to run for real in unit tests '''
    full_script_path = '/usr/local/bin/{}'.format(script)
    mock_script = dedent('''\
    #!/bin/bash -e
    echo "\$0 \$@" >> /var/log/{script}
    case "\$1 \$2" in'''.format(script=script))
    for k, v in args.iteritems():
        case = dedent('''
        \"{arg}\")
        echo \"{res}\"
        exit {retcode}
        ;;'''.format(arg=k, res=v[0], retcode=v[1]))
        mock_script += case
    mock_script += dedent('''
    esac''')
    container.run('''
    cat <<EOF> {script}\n{content}\nEOF
    chmod +x {script}
    rm -f /var/log/{scriptlog}'''.format(script=full_script_path, content=mock_script, scriptlog=script))

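The only difference between the two helpers is the case key: mock_command matches on "$1", mock_command_2 on "$1 $2", which is what lets the IPv6 tests intercept `ip -6 address` specifically. For the mock_command_2('ip', ...) call used above, the script written to /usr/local/bin/ip should come out roughly as follows (reconstructed from the template, so exact quoting and whitespace may differ):

#!/bin/bash -e
# every call is logged, so a test could also inspect /var/log/ip
echo "$0 $@" >> /var/log/ip
case "$1 $2" in
"-6 address")
echo "inet6 fda2:2001:5555:0:d210:52fa:fe00:7ad7/64 scope global"
exit 0
;;
esac
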
def test_validate_ip_valid(Pihole):
    '''
    Given a valid IP address, valid_ip returns success
    '''

def run_script(Pihole, script):
    result = Pihole.run(script)
    assert result.rc == 0
    return result
    output = Pihole.run('''
    source /opt/pihole/basic-install.sh
    valid_ip "192.168.1.1"
    ''')

    assert output.rc == 0


def test_validate_ip_invalid_octet(Pihole):
    '''
    Given an invalid IP address (large octet), valid_ip returns an error
    '''

    output = Pihole.run('''
    source /opt/pihole/basic-install.sh
    valid_ip "1092.168.1.1"
    ''')

    assert output.rc == 1


def test_validate_ip_invalid_letters(Pihole):
    '''
    Given an invalid IP address (contains letters), valid_ip returns an error
    '''

    output = Pihole.run('''
    source /opt/pihole/basic-install.sh
    valid_ip "not an IP"
    ''')

    assert output.rc == 1
265  test/test_centos_fedora_support.py  Normal file
@@ -0,0 +1,265 @@
import pytest
from conftest import (
    tick_box,
    info_box,
    cross_box,
    mock_command,
    mock_command_2,
)


def mock_selinux_config(state, Pihole):
    '''
    Creates a mock SELinux config file with expected content
    '''
    # validate state string
    valid_states = ['enforcing', 'permissive', 'disabled']
    assert state in valid_states
    # getenforce returns the running state of SELinux
    mock_command('getenforce', {'*': (state.capitalize(), '0')}, Pihole)
    # create mock configuration with desired content
    Pihole.run('''
    mkdir /etc/selinux
    echo "SELINUX={state}" > /etc/selinux/config
    '''.format(state=state.lower()))

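To make the fixture's effect concrete: after mock_selinux_config("permissive", Pihole) has run, the container should, going by the helper above, answer roughly like this; the two commands are just an illustration, not part of the test suite:

getenforce                # prints "Permissive" via the mocked /usr/local/bin/getenforce
cat /etc/selinux/config   # prints "SELINUX=permissive"
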
@pytest.mark.parametrize("tag", [('centos'), ('fedora'), ])
def test_selinux_enforcing_exit(Pihole):
    '''
    confirms installer prompts to exit when SELinux is Enforcing by default
    '''
    mock_selinux_config("enforcing", Pihole)
    check_selinux = Pihole.run('''
    source /opt/pihole/basic-install.sh
    checkSelinux
    ''')
    expected_stdout = cross_box + ' Current SELinux: Enforcing'
    assert expected_stdout in check_selinux.stdout
    expected_stdout = 'SELinux Enforcing detected, exiting installer'
    assert expected_stdout in check_selinux.stdout
    assert check_selinux.rc == 1


@pytest.mark.parametrize("tag", [('centos'), ('fedora'), ])
def test_selinux_permissive(Pihole):
    '''
    confirms installer continues when SELinux is Permissive
    '''
    mock_selinux_config("permissive", Pihole)
    check_selinux = Pihole.run('''
    source /opt/pihole/basic-install.sh
    checkSelinux
    ''')
    expected_stdout = tick_box + ' Current SELinux: Permissive'
    assert expected_stdout in check_selinux.stdout
    assert check_selinux.rc == 0


@pytest.mark.parametrize("tag", [('centos'), ('fedora'), ])
def test_selinux_disabled(Pihole):
    '''
    confirms installer continues when SELinux is Disabled
    '''
    mock_selinux_config("disabled", Pihole)
    check_selinux = Pihole.run('''
    source /opt/pihole/basic-install.sh
    checkSelinux
    ''')
    expected_stdout = tick_box + ' Current SELinux: Disabled'
    assert expected_stdout in check_selinux.stdout
    assert check_selinux.rc == 0


@pytest.mark.parametrize("tag", [('fedora'), ])
def test_epel_and_remi_not_installed_fedora(Pihole):
    '''
    confirms installer does not attempt to install EPEL/REMI repositories
    on Fedora
    '''
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    assert distro_check.stdout == ''

    epel_package = Pihole.package('epel-release')
    assert not epel_package.is_installed
    remi_package = Pihole.package('remi-release')
    assert not remi_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_release_supported_version_check_centos(Pihole):
    '''
    confirms installer exits on unsupported releases of CentOS
    '''
    # modify /etc/redhat-release to mock an unsupported CentOS release
    Pihole.run('echo "CentOS Linux release 6.9" > /etc/redhat-release')
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    expected_stdout = cross_box + (' CentOS 6 is not supported.')
    assert expected_stdout in distro_check.stdout
    expected_stdout = 'Please update to CentOS release 7 or later'
    assert expected_stdout in distro_check.stdout


@pytest.mark.parametrize("tag", [('centos'), ])
def test_enable_epel_repository_centos(Pihole):
    '''
    confirms the EPEL package repository is enabled when installed on CentOS
    '''
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    expected_stdout = info_box + (' Enabling EPEL package repository '
                                  '(https://fedoraproject.org/wiki/EPEL)')
    assert expected_stdout in distro_check.stdout
    expected_stdout = tick_box + ' Installed epel-release'
    assert expected_stdout in distro_check.stdout
    epel_package = Pihole.package('epel-release')
    assert epel_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_php_upgrade_default_optout_centos(Pihole):
    '''
    confirms the default behavior to opt-out of installing PHP7 from REMI
    '''
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    expected_stdout = info_box + (' User opt-out of PHP 7 upgrade on CentOS. '
                                  'Deprecated PHP may be in use.')
    assert expected_stdout in distro_check.stdout
    remi_package = Pihole.package('remi-release')
    assert not remi_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_php_upgrade_user_optout_centos(Pihole):
    '''
    confirms installer behavior when user opt-out of installing PHP7 from REMI
    (php not currently installed)
    '''
    # Whiptail dialog returns Cancel for user prompt
    mock_command('whiptail', {'*': ('', '1')}, Pihole)
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    expected_stdout = info_box + (' User opt-out of PHP 7 upgrade on CentOS. '
                                  'Deprecated PHP may be in use.')
    assert expected_stdout in distro_check.stdout
    remi_package = Pihole.package('remi-release')
    assert not remi_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_php_upgrade_user_optin_centos(Pihole):
    '''
    confirms installer behavior when user opt-in to installing PHP7 from REMI
    (php not currently installed)
    '''
    # Whiptail dialog returns Continue for user prompt
    mock_command('whiptail', {'*': ('', '0')}, Pihole)
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    assert 'opt-out' not in distro_check.stdout
    expected_stdout = info_box + (' Enabling Remi\'s RPM repository '
                                  '(https://rpms.remirepo.net)')
    assert expected_stdout in distro_check.stdout
    expected_stdout = tick_box + (' Remi\'s RPM repository has '
                                  'been enabled for PHP7')
    assert expected_stdout in distro_check.stdout
    remi_package = Pihole.package('remi-release')
    assert remi_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_php_version_lt_7_detected_upgrade_default_optout_centos(Pihole):
    '''
    confirms the default behavior to opt-out of upgrading to PHP7 from REMI
    '''
    # first we will install the default php version to test installer behavior
    php_install = Pihole.run('yum install -y php')
    assert php_install.rc == 0
    php_package = Pihole.package('php')
    default_centos_php_version = php_package.version.split('.')[0]
    if int(default_centos_php_version) >= 7:  # PHP7 is supported/recommended
        pytest.skip("Test deprecated . Detected default PHP version >= 7")
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    expected_stdout = info_box + (' User opt-out of PHP 7 upgrade on CentOS. '
                                  'Deprecated PHP may be in use.')
    assert expected_stdout in distro_check.stdout
    remi_package = Pihole.package('remi-release')
    assert not remi_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_php_version_lt_7_detected_upgrade_user_optout_centos(Pihole):
    '''
    confirms installer behavior when user opt-out to upgrade to PHP7 via REMI
    '''
    # first we will install the default php version to test installer behavior
    php_install = Pihole.run('yum install -y php')
    assert php_install.rc == 0
    php_package = Pihole.package('php')
    default_centos_php_version = php_package.version.split('.')[0]
    if int(default_centos_php_version) >= 7:  # PHP7 is supported/recommended
        pytest.skip("Test deprecated . Detected default PHP version >= 7")
    # Whiptail dialog returns Cancel for user prompt
    mock_command('whiptail', {'*': ('', '1')}, Pihole)
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    ''')
    expected_stdout = info_box + (' User opt-out of PHP 7 upgrade on CentOS. '
                                  'Deprecated PHP may be in use.')
    assert expected_stdout in distro_check.stdout
    remi_package = Pihole.package('remi-release')
    assert not remi_package.is_installed


@pytest.mark.parametrize("tag", [('centos'), ])
def test_php_version_lt_7_detected_upgrade_user_optin_centos(Pihole):
    '''
    confirms installer behavior when user opt-in to upgrade to PHP7 via REMI
    '''
    # first we will install the default php version to test installer behavior
    php_install = Pihole.run('yum install -y php')
    assert php_install.rc == 0
    php_package = Pihole.package('php')
    default_centos_php_version = php_package.version.split('.')[0]
    if int(default_centos_php_version) >= 7:  # PHP7 is supported/recommended
        pytest.skip("Test deprecated . Detected default PHP version >= 7")
    # Whiptail dialog returns Continue for user prompt
    mock_command('whiptail', {'*': ('', '0')}, Pihole)
    distro_check = Pihole.run('''
    source /opt/pihole/basic-install.sh
    distro_check
    install_dependent_packages PIHOLE_WEB_DEPS[@]
    ''')
    expected_stdout = info_box + (' User opt-out of PHP 7 upgrade on CentOS. '
                                  'Deprecated PHP may be in use.')
    assert expected_stdout not in distro_check.stdout
    expected_stdout = info_box + (' Enabling Remi\'s RPM repository '
                                  '(https://rpms.remirepo.net)')
    assert expected_stdout in distro_check.stdout
    expected_stdout = tick_box + (' Remi\'s RPM repository has '
                                  'been enabled for PHP7')
    assert expected_stdout in distro_check.stdout
    remi_package = Pihole.package('remi-release')
    assert remi_package.is_installed
    updated_php_package = Pihole.package('php')
    updated_php_version = updated_php_package.version.split('.')[0]
    assert int(updated_php_version) == 7
@@ -1,13 +1,18 @@
import pytest
import testinfra

run_local = testinfra.get_backend(
    "local://"
).get_module("Command").run


def test_scripts_pass_shellcheck():
    ''' Make sure shellcheck does not find anything wrong with our shell scripts '''
    shellcheck = "find . -type f -name 'update.sh' | while read file; do shellcheck -x \"$file\" -e SC1090,SC1091; done;"
    '''
    Make sure shellcheck does not find anything wrong with our shell scripts
    '''
    shellcheck = ("find . -type f -name 'update.sh' "
                  "| while read file; do "
                  "shellcheck -x \"$file\" -e SC1090,SC1091; "
                  "done;")
    results = run_local(shellcheck)
    print results.stdout
    assert '' == results.stdout
10  tox.ini  Normal file
@@ -0,0 +1,10 @@
[tox]
envlist = py27

[testenv]
whitelist_externals = docker
deps = -rrequirements.txt
commands = docker build -f test/debian.Dockerfile -t pytest_pihole:debian .
           docker build -f test/centos.Dockerfile -t pytest_pihole:centos .
           docker build -f test/fedora.Dockerfile -t pytest_pihole:fedora .
           pytest {posargs:-vv -n auto} -m "not build_stage" ./test/
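With this tox.ini in place, the whole matrix can be driven locally with plain tox, assuming Docker and tox itself are available; the invocations below are standard tox usage rather than anything defined by this repository:

pip install tox
tox                       # builds the debian/centos/fedora images, then runs pytest
tox -- -k test_selinux    # arguments after -- replace {posargs} and are passed to pytest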