Compare commits
3829 commits
v0.3.4
...
py3-latest
Author | SHA1 | Date | |
---|---|---|---|
7edbda70f5 | |||
![]() |
290025958f | ||
![]() |
25c5658b72 | ||
![]() |
2970e3a205 | ||
![]() |
866179f6a3 | ||
![]() |
e8cf14bcf5 | ||
![]() |
fedcf9c1c6 | ||
![]() |
117bcf25d9 | ||
![]() |
a429349cd4 | ||
![]() |
d8e52eaabd | ||
![]() |
f2ef6e5d9c | ||
![]() |
dd2bb07cfb | ||
![]() |
06a9d1e0ff | ||
![]() |
c354f9e24d | ||
![]() |
77b4297224 | ||
![]() |
edc5310cd2 | ||
![]() |
99a8409513 | ||
![]() |
3550a64837 | ||
![]() |
85ef28e6fb | ||
![]() |
1500d9356b | ||
![]() |
f1a71770fa | ||
![]() |
f79a73cef4 | ||
![]() |
0731787518 | ||
![]() |
ad95eede10 | ||
![]() |
459b0a73ca | ||
![]() |
b7870edd2e | ||
![]() |
d5703541be | ||
![]() |
ba96654e1d | ||
![]() |
ac72d623f0 | ||
![]() |
fd857985f6 | ||
![]() |
966f671efe | ||
![]() |
86109ae4b2 | ||
![]() |
611fc774c8 | ||
![]() |
0ed0b746a4 | ||
![]() |
49e68c3a78 | ||
![]() |
3ac677c9a7 | ||
![]() |
016cfe9e16 | ||
![]() |
712ee18634 | ||
![]() |
5579c6b3cc | ||
![]() |
c3815c56ea | ||
![]() |
b257338b0a | ||
![]() |
ac70f83879 | ||
![]() |
2ad80afa10 | ||
![]() |
fe048cd08c | ||
![]() |
f9d7ccd83c | ||
![]() |
b29884db78 | ||
![]() |
a5190234ab | ||
![]() |
00db9c9f87 | ||
![]() |
02ceb70a4f | ||
![]() |
7ce118d645 | ||
![]() |
eb397cf4c7 | ||
![]() |
f8c9f2da4f | ||
![]() |
69d7eacfa4 | ||
![]() |
f498aedb96 | ||
![]() |
5ee928852b | ||
![]() |
7078badefa | ||
![]() |
edd2760fed | ||
![]() |
7d1ec41d09 | ||
![]() |
7acd8df906 | ||
![]() |
a1eb6eede5 | ||
![]() |
eab7fc2be4 | ||
![]() |
454c0b2e7e | ||
![]() |
03da34c5d6 | ||
![]() |
c3f4591f91 | ||
![]() |
3ad7bc87e5 | ||
![]() |
b4f4c12521 | ||
![]() |
39d86fec9c | ||
![]() |
02c27b841f | ||
![]() |
8dafbef6ad | ||
![]() |
c831d175ad | ||
![]() |
3cbfbae42d | ||
![]() |
a1105562cd | ||
![]() |
b3c9de5e47 | ||
![]() |
3ffa3c2f79 | ||
![]() |
03273527da | ||
![]() |
2795e20b0c | ||
![]() |
eb86df5fb6 | ||
![]() |
ecfb6b6b3a | ||
![]() |
1b4f93f14b | ||
![]() |
40db30a260 | ||
![]() |
3a4a5404c0 | ||
![]() |
2bb12a247b | ||
![]() |
38a3ea6373 | ||
![]() |
2798ad6fb2 | ||
![]() |
2e7b0071a3 | ||
![]() |
12e82bc9c4 | ||
![]() |
774691fa39 | ||
![]() |
52d6c9fedf | ||
![]() |
d68c635e9a | ||
![]() |
21557b5517 | ||
![]() |
11fe0ece67 | ||
![]() |
92363d5227 | ||
![]() |
cf0c5db5b9 | ||
![]() |
57dda4e6d6 | ||
![]() |
0a3bf43e1c | ||
![]() |
48455e3e45 | ||
![]() |
c515e26cd6 | ||
![]() |
5cac059ef4 | ||
![]() |
8f6e27904c | ||
![]() |
e757f2a2d4 | ||
![]() |
b104d5dd41 | ||
![]() |
f6106be733 | ||
![]() |
9305a2e7ac | ||
![]() |
4f6833c488 | ||
![]() |
e2b1cf3938 | ||
![]() |
6fcfe5b394 | ||
![]() |
12013d64c8 | ||
![]() |
54fb2fde7c | ||
![]() |
147dd4bc35 | ||
![]() |
32c9d5fa70 | ||
![]() |
dd08b89c81 | ||
![]() |
6770b450b3 | ||
![]() |
9b2772b171 | ||
![]() |
29dac8a188 | ||
![]() |
352da6bf62 | ||
![]() |
288050e5b4 | ||
![]() |
785d2351eb | ||
![]() |
9d1bed11af | ||
![]() |
b2342e64bd | ||
![]() |
dcbfb8afe0 | ||
![]() |
4b8dfc5114 | ||
![]() |
c0baf8b68d | ||
![]() |
73dc69605b | ||
![]() |
52ed8c18ca | ||
![]() |
19bc0358b5 | ||
![]() |
392350ff79 | ||
![]() |
f0b0f57643 | ||
![]() |
85790f8866 | ||
![]() |
ce5b4c3eda | ||
![]() |
fde3b51129 | ||
![]() |
550d02d473 | ||
![]() |
4da89580c1 | ||
![]() |
bf092b83ab | ||
![]() |
0309b81695 | ||
![]() |
e74fdc4036 | ||
![]() |
b9c65d75ef | ||
![]() |
49f8e0bc3a | ||
![]() |
c4f8c0177e | ||
![]() |
8c20927f68 | ||
![]() |
5b09f7af41 | ||
![]() |
1695571afa | ||
![]() |
8dc5aee8aa | ||
![]() |
94765af0f3 | ||
![]() |
5a226baaa5 | ||
![]() |
b7bc197012 | ||
![]() |
a0dfbe31f6 | ||
![]() |
964545dd1f | ||
![]() |
817ab04941 | ||
![]() |
91d0ce3a50 | ||
![]() |
e97236201c | ||
![]() |
e14f5bf847 | ||
![]() |
79f10ffe0c | ||
![]() |
0bc9374a7d | ||
![]() |
8a71bf65cd | ||
![]() |
9d198ff7f2 | ||
![]() |
cafeebf120 | ||
![]() |
46fba195da | ||
![]() |
501bd51bd1 | ||
![]() |
f7874e1ca3 | ||
![]() |
8d964d1b8e | ||
![]() |
051e404a80 | ||
![]() |
6c1abf4004 | ||
![]() |
0907edb6b1 | ||
![]() |
6ff14d1bbd | ||
![]() |
4ad5c065f1 | ||
![]() |
c17b8d53d3 | ||
![]() |
9022a1098a | ||
![]() |
6e758ff363 | ||
![]() |
29c3523353 | ||
![]() |
47ff6c6801 | ||
![]() |
6bd49e8aff | ||
![]() |
ddbd5c7b19 | ||
![]() |
635c3b27cd | ||
![]() |
6776dabdb3 | ||
![]() |
14cbaf47c8 | ||
![]() |
4eb50377c3 | ||
![]() |
ea6016d004 | ||
![]() |
79d26060b3 | ||
![]() |
97ad084c21 | ||
![]() |
179e5cb651 | ||
![]() |
367745b5ea | ||
![]() |
5c38a78b79 | ||
![]() |
a02ed56c69 | ||
![]() |
f868fed51d | ||
![]() |
e4f42b8ce3 | ||
![]() |
eeb48fc72e | ||
![]() |
85733abade | ||
![]() |
8db4344171 | ||
![]() |
0a9391d28b | ||
![]() |
cfef7ab071 | ||
![]() |
38c1727b94 | ||
![]() |
36d96d484e | ||
![]() |
439f8fc476 | ||
![]() |
07faa3d6d3 | ||
![]() |
3c7022ea9d | ||
![]() |
a657afcd47 | ||
![]() |
f3a839f422 | ||
![]() |
ad3920b26a | ||
![]() |
8ffd8d7a3e | ||
![]() |
71001491df | ||
![]() |
701765b53b | ||
![]() |
fa880d99f1 | ||
![]() |
0a9a9b5a57 | ||
![]() |
56acac8cd3 | ||
![]() |
995d3bf717 | ||
![]() |
1de7485858 | ||
![]() |
e1c0fd6984 | ||
![]() |
108a3de433 | ||
![]() |
740fe65355 | ||
![]() |
abde3d4cf7 | ||
![]() |
c90c887f8f | ||
![]() |
a4d91f7081 | ||
![]() |
31d4304915 | ||
![]() |
1eec388252 | ||
![]() |
70de3213d6 | ||
![]() |
f41d022038 | ||
![]() |
723d1f4370 | ||
![]() |
ca94703fc3 | ||
![]() |
a5971adbe6 | ||
![]() |
dfeebbabe8 | ||
![]() |
66194ce435 | ||
![]() |
2de3c9a544 | ||
![]() |
5fb342a825 | ||
![]() |
3156d2f94b | ||
![]() |
ba156bbdec | ||
![]() |
6beb76eac8 | ||
![]() |
d3d18234df | ||
![]() |
faa24a8b41 | ||
![]() |
f749228a2c | ||
![]() |
7e17a4e967 | ||
![]() |
19f003141b | ||
![]() |
53a6063576 | ||
![]() |
33af83b2cd | ||
![]() |
3426d5fe63 | ||
![]() |
f2934c10b4 | ||
![]() |
a2457b2488 | ||
![]() |
193632c3f9 | ||
![]() |
a1c176bb3f | ||
![]() |
02fd1dc4d0 | ||
![]() |
296e4aab57 | ||
![]() |
7ba2c9344d | ||
![]() |
09e65e1d95 | ||
![]() |
c4f65a5d7b | ||
![]() |
37a401fdef | ||
![]() |
e7d1e1f097 | ||
![]() |
6df3036f11 | ||
![]() |
e2a582d892 | ||
![]() |
aaabcb6b1a | ||
![]() |
7bf790003e | ||
![]() |
f46b945cdc | ||
![]() |
27761c5045 | ||
![]() |
e0bf4dc9ec | ||
![]() |
1fc67a3d71 | ||
![]() |
5baacf963d | ||
![]() |
b790bcac9b | ||
![]() |
219b90668f | ||
![]() |
2862587c15 | ||
![]() |
6218a92895 | ||
![]() |
58f03e21ef | ||
![]() |
b85477787d | ||
![]() |
6a1235bd45 | ||
![]() |
33d6a9c402 | ||
![]() |
17f65a5179 | ||
![]() |
f8e2cbe429 | ||
![]() |
f0a706f6ab | ||
![]() |
8b994e42c2 | ||
![]() |
ae9a76a6c9 | ||
![]() |
9b85d8638d | ||
![]() |
a9c75a3146 | ||
![]() |
1cc0ec3f31 | ||
![]() |
b1819ff71d | ||
![]() |
fca1033f83 | ||
![]() |
32855d0479 | ||
![]() |
2c826eba2d | ||
![]() |
8facd9ff84 | ||
![]() |
64e5e0c80e | ||
![]() |
8aa4e27938 | ||
![]() |
bc76bf291a | ||
![]() |
70cc982e2e | ||
![]() |
61ac6a30d3 | ||
![]() |
d2627f36d5 | ||
![]() |
d36324e0d3 | ||
![]() |
113b57415f | ||
![]() |
fefd2474b1 | ||
![]() |
28ce08de8e | ||
![]() |
037f0a3ff4 | ||
![]() |
a3546d56b0 | ||
![]() |
95bf4ecb42 | ||
![]() |
c91f2f0a09 | ||
![]() |
6d425f30fe | ||
![]() |
8e79a7da63 | ||
![]() |
10c02c31c2 | ||
![]() |
a0f5e1bde8 | ||
![]() |
2e9cff928c | ||
![]() |
46210b2f04 | ||
![]() |
6dae187e22 | ||
![]() |
a7e783a26b | ||
![]() |
60af3ceda9 | ||
![]() |
11415fe082 | ||
![]() |
df93fa0ffe | ||
![]() |
849d514f28 | ||
![]() |
4d8ee4bafb | ||
![]() |
ac8aaaff75 | ||
![]() |
238ede9419 | ||
![]() |
835174270e | ||
![]() |
62a2ec7254 | ||
![]() |
a9368bb3c8 | ||
![]() |
e75e199334 | ||
![]() |
2b7aebd89d | ||
![]() |
3e08eabc86 | ||
![]() |
a16d55c863 | ||
![]() |
e51ae580b9 | ||
![]() |
914576b9db | ||
![]() |
3edb34ec56 | ||
![]() |
224093b3dd | ||
![]() |
77c3e43978 | ||
![]() |
03350d7454 | ||
![]() |
2b5e57e840 | ||
![]() |
39442977db | ||
![]() |
0af90aad37 | ||
![]() |
c5d51c9cab | ||
![]() |
0dbcec8092 | ||
![]() |
76e4b75c2d | ||
![]() |
c1ad7914f1 | ||
![]() |
9085a4b0cc | ||
![]() |
820346c98d | ||
![]() |
995d87c167 | ||
![]() |
fe739fa848 | ||
![]() |
b6d0bf8f6b | ||
![]() |
aec1ab4ed2 | ||
![]() |
7d5f3354b6 | ||
![]() |
feb58e4b0e | ||
![]() |
163825c03e | ||
![]() |
3fc80f834d | ||
![]() |
20b0db7ddb | ||
![]() |
b2e7cbb927 | ||
![]() |
5987274edf | ||
![]() |
ba218974c4 | ||
![]() |
721d4a22f1 | ||
![]() |
32b0153d34 | ||
![]() |
71d32d7414 | ||
![]() |
796ee572ce | ||
![]() |
60146a083c | ||
![]() |
df87bd41b4 | ||
![]() |
3d73599deb | ||
![]() |
48124e12d9 | ||
![]() |
17fb740c51 | ||
![]() |
c6b07f1294 | ||
![]() |
3ccce46314 | ||
![]() |
7c1da5da52 | ||
![]() |
c5de1447c8 | ||
![]() |
e16ace433c | ||
![]() |
975f53b95b | ||
![]() |
8a994b5559 | ||
![]() |
2acf24c336 | ||
![]() |
2c3f1ba7ad | ||
![]() |
c01245a4e0 | ||
![]() |
f119f7d0d2 | ||
![]() |
62d4edadf6 | ||
![]() |
8bf17d3a69 | ||
![]() |
0881e274a9 | ||
![]() |
7ca09ba75b | ||
![]() |
bde8b30d5c | ||
![]() |
87d1c736e2 | ||
![]() |
eba81cc7d2 | ||
![]() |
69eb831c7e | ||
![]() |
99e6326974 | ||
![]() |
50bbe47bf2 | ||
![]() |
8bfef12ad4 | ||
![]() |
6085cfd1a7 | ||
![]() |
d660a268e8 | ||
![]() |
c161140a90 | ||
![]() |
7af8d1cd93 | ||
![]() |
845b50915d | ||
![]() |
dbbad3097c | ||
![]() |
1abaa6fddc | ||
![]() |
7ecf09a496 | ||
![]() |
c0639fef75 | ||
![]() |
c08d266822 | ||
![]() |
6bc3c168c6 | ||
![]() |
1fe7127082 | ||
![]() |
cfaaaf57ec | ||
![]() |
93d2ee65fe | ||
![]() |
9c08e41b9e | ||
![]() |
abee87bbec | ||
![]() |
d4b6f79746 | ||
![]() |
a7c26f893f | ||
![]() |
24b8cdf87a | ||
![]() |
fd43aa61ef | ||
![]() |
77869830c5 | ||
![]() |
87fc8ced5e | ||
![]() |
909967629b | ||
![]() |
afe0d82f18 | ||
![]() |
1ad97a6696 | ||
![]() |
e7e8e59c1e | ||
![]() |
f3665b172f | ||
![]() |
23b3cd3986 | ||
![]() |
f7ee6744af | ||
![]() |
ac45217816 | ||
![]() |
8c51e81a0b | ||
![]() |
9d777951dd | ||
![]() |
2778b17f8d | ||
![]() |
98c98fbac7 | ||
![]() |
9b1f6337c3 | ||
![]() |
2019093431 | ||
![]() |
eac25caf28 | ||
![]() |
b421893434 | ||
![]() |
f1b19f5fc7 | ||
![]() |
61f1a741fc | ||
![]() |
f01d335835 | ||
![]() |
5c1b34387c | ||
![]() |
dfd55c3957 | ||
![]() |
b21895fa78 | ||
![]() |
495d695c5a | ||
![]() |
3309489c24 | ||
![]() |
8a5a75e68f | ||
![]() |
c1df78b97f | ||
![]() |
4c31aae97b | ||
![]() |
0839fdfc5e | ||
![]() |
d062f01127 | ||
![]() |
e91fb90a45 | ||
![]() |
6539ca5eb0 | ||
![]() |
b138ebc519 | ||
![]() |
79c1cd15ab | ||
![]() |
10c1986c54 | ||
![]() |
d7cabb47ca | ||
![]() |
8de1714f08 | ||
![]() |
20ba9cd589 | ||
![]() |
af1ac9bce8 | ||
![]() |
31a6e3ee9a | ||
![]() |
dca1dcdd2d | ||
![]() |
a54f5f3e9f | ||
![]() |
51f49cd45a | ||
![]() |
eb63eb7b1d | ||
![]() |
b4f7e51e96 | ||
![]() |
c2d2189039 | ||
![]() |
1eda3258de | ||
![]() |
0171cb0844 | ||
![]() |
08a0a63631 | ||
![]() |
8ed7d0385d | ||
![]() |
02d45e9c39 | ||
![]() |
2a402a0674 | ||
![]() |
1be56b5a39 | ||
![]() |
1e175bc41f | ||
![]() |
c16569a6ab | ||
![]() |
d19cc64611 | ||
![]() |
958882c1c5 | ||
![]() |
2f7323043f | ||
![]() |
fbc7b6fc4f | ||
![]() |
3178b69172 | ||
![]() |
28fcf3c1ea | ||
![]() |
71939097b0 | ||
![]() |
2fd337bb55 | ||
![]() |
5e26161e84 | ||
![]() |
04ecb89e9a | ||
![]() |
23f851343f | ||
![]() |
5ce1782d05 | ||
![]() |
daee14533c | ||
![]() |
31f505b309 | ||
![]() |
c8214bf3ea | ||
![]() |
1935a69c04 | ||
![]() |
ea5f64bfea | ||
![]() |
3dd04b27de | ||
![]() |
9940b7bff3 | ||
![]() |
901ccf2d14 | ||
![]() |
6a1a821ed4 | ||
![]() |
aa9fe09337 | ||
![]() |
bdb655243f | ||
![]() |
566c29363f | ||
![]() |
37b8c0241f | ||
![]() |
1a17645e93 | ||
![]() |
5fba850d74 | ||
![]() |
bd90e0ce52 | ||
![]() |
c24cfa721b | ||
![]() |
1670d96908 | ||
![]() |
ec3c44c5b3 | ||
![]() |
12bfad8fe6 | ||
![]() |
594edc6e9a | ||
![]() |
99304a09ca | ||
![]() |
5c93aadce3 | ||
![]() |
f0c10efca6 | ||
![]() |
c10dd5239e | ||
![]() |
fa0d1a50b5 | ||
![]() |
66a1c4d242 | ||
![]() |
b7c6b84826 | ||
![]() |
1c587bde25 | ||
![]() |
e1dc29c374 | ||
![]() |
59e0ffd8e0 | ||
![]() |
f7c767c1c8 | ||
![]() |
fca9db7972 | ||
![]() |
afd23849a6 | ||
![]() |
1b2eee058c | ||
![]() |
777486a5be | ||
![]() |
8b6f221e22 | ||
![]() |
97ecb7e3aa | ||
![]() |
5df5e25d68 | ||
![]() |
66a950a481 | ||
![]() |
29346cdef5 | ||
![]() |
4f8e941e39 | ||
![]() |
756f5a1608 | ||
![]() |
416e7d6fe0 | ||
![]() |
7b210429b5 | ||
![]() |
c52d47b15f | ||
![]() |
9a43626aa6 | ||
![]() |
c14e722303 | ||
![]() |
07633ba79d | ||
![]() |
6ff7fe55fc | ||
![]() |
a14c36cd3e | ||
![]() |
c21fe3d23a | ||
![]() |
89e8fd3d3a | ||
![]() |
966f393e20 | ||
![]() |
d85c27e67b | ||
![]() |
a5f8a53196 | ||
![]() |
9299e5b614 | ||
![]() |
6c31a3b77e | ||
![]() |
6262c80886 | ||
![]() |
5aa115c88a | ||
![]() |
511587dd8b | ||
![]() |
5d34bb9062 | ||
![]() |
4025d753e3 | ||
![]() |
58214c0ac3 | ||
![]() |
5d113757df | ||
![]() |
b41a03674f | ||
![]() |
8c1f64243f | ||
![]() |
cdd9dd4f6f | ||
![]() |
57f2a43864 | ||
![]() |
74d7fb7835 | ||
![]() |
dd61429e2f | ||
![]() |
8f27f50b34 | ||
![]() |
96e7fbdca1 | ||
![]() |
39352eb97e | ||
![]() |
1c607645c7 | ||
![]() |
2ad3493fb0 | ||
![]() |
331dc99086 | ||
![]() |
4424c8272d | ||
![]() |
16162955af | ||
![]() |
23006e495f | ||
![]() |
4351af35f4 | ||
![]() |
e8af5db2e8 | ||
![]() |
9d048371b7 | ||
![]() |
a187726ba8 | ||
![]() |
0ff1bcfd19 | ||
![]() |
08fee35bcf | ||
![]() |
08574bf676 | ||
![]() |
5c27a0efcc | ||
![]() |
7576f96604 | ||
![]() |
456e330854 | ||
![]() |
8d95eb937f | ||
![]() |
2f50fef787 | ||
![]() |
ac69007292 | ||
![]() |
e8b0a3d1c4 | ||
![]() |
dfbbbd9381 | ||
![]() |
4ab339b375 | ||
![]() |
fce24cedbd | ||
![]() |
f9b62564ca | ||
![]() |
d569d9488a | ||
![]() |
f172751df3 | ||
![]() |
13233d47bd | ||
![]() |
74d7d92a4d | ||
![]() |
d3a0f5c268 | ||
![]() |
63f213a5d5 | ||
![]() |
6d4c4d9f27 | ||
![]() |
b2be4672ec | ||
![]() |
1bfe328a1b | ||
![]() |
1f453b6c13 | ||
![]() |
ee8e3c3c9c | ||
![]() |
37f315dfc2 | ||
![]() |
1e1e560795 | ||
![]() |
8d88cfcd68 | ||
![]() |
74badf9c9c | ||
![]() |
86087550f1 | ||
![]() |
8dfc200f24 | ||
![]() |
cb4a4bd707 | ||
![]() |
24ba2a150b | ||
![]() |
e1d92bf0ec | ||
![]() |
270f3e9ffd | ||
![]() |
e1f73697ff | ||
![]() |
e82155aac4 | ||
![]() |
d7669413af | ||
![]() |
28d4fc5d12 | ||
![]() |
dac4fcd52b | ||
![]() |
8dff33b38a | ||
![]() |
6bae1f8a4b | ||
![]() |
10ceeb7f02 | ||
![]() |
448bb3ce98 | ||
![]() |
0531d47721 | ||
![]() |
b21719e2f2 | ||
![]() |
2960db2352 | ||
![]() |
cb3629343b | ||
![]() |
fa7013fdf7 | ||
![]() |
db868dba81 | ||
![]() |
608a411d97 | ||
![]() |
20c63c73b3 | ||
![]() |
5ca3401eb9 | ||
![]() |
435a3c285e | ||
![]() |
6405cae706 | ||
![]() |
6451e7f9f1 | ||
![]() |
127fa5fa82 | ||
![]() |
63fd0a9fa1 | ||
![]() |
344ad44854 | ||
![]() |
43a5742258 | ||
![]() |
924a61309a | ||
![]() |
9dd5c88da4 | ||
![]() |
0598bcf332 | ||
![]() |
ead1b3e5f5 | ||
![]() |
dd493c87fa | ||
![]() |
29640e614c | ||
![]() |
73e0aa17c4 | ||
![]() |
917a2e59ce | ||
![]() |
119e1a9bf0 | ||
![]() |
6eb79ba75e | ||
![]() |
1f9eafa619 | ||
![]() |
d5da404ed4 | ||
![]() |
1b41aa70cc | ||
![]() |
284b1a4f8a | ||
![]() |
fe432ad843 | ||
![]() |
15fca6bd12 | ||
![]() |
57c0daa294 | ||
![]() |
fee95654fa | ||
![]() |
bb436f9931 | ||
![]() |
3682f0aed4 | ||
![]() |
43c366d2fb | ||
![]() |
b21b885aa9 | ||
![]() |
6bb929a896 | ||
![]() |
f5829f6012 | ||
![]() |
d06b4abecf | ||
![]() |
d7db631b95 | ||
![]() |
93e6ec4933 | ||
![]() |
2fbf2c7771 | ||
![]() |
b474677db1 | ||
![]() |
dbcd8602c5 | ||
![]() |
1793407748 | ||
![]() |
6f0d4a50d1 | ||
![]() |
10817aefae | ||
![]() |
4293a44c93 | ||
![]() |
96759e9303 | ||
![]() |
448483371c | ||
![]() |
0738964e64 | ||
![]() |
deec2e62ce | ||
![]() |
c52da69367 | ||
![]() |
2de35266c4 | ||
![]() |
e6b8097b43 | ||
![]() |
55c7585334 | ||
![]() |
62d278a367 | ||
![]() |
38e20b7c31 | ||
![]() |
d3fce8ca36 | ||
![]() |
2a7d7acce0 | ||
![]() |
eab63c6af8 | ||
![]() |
4f0613689a | ||
![]() |
743463dce9 | ||
![]() |
0b04176f18 | ||
![]() |
166a65e1b1 | ||
![]() |
5da4537d7c | ||
![]() |
b9e71c9f6f | ||
![]() |
500c96abe2 | ||
![]() |
149c4f5c7b | ||
![]() |
76bc9fcddf | ||
![]() |
f999f167b1 | ||
![]() |
9ac96cdd50 | ||
![]() |
3c4bc6ae35 | ||
![]() |
879b504b0f | ||
![]() |
baa5df1d01 | ||
![]() |
912c958ac0 | ||
![]() |
d166a16a24 | ||
![]() |
1bd1ddf410 | ||
![]() |
7890771faa | ||
![]() |
376fd0d439 | ||
![]() |
c414e6caa2 | ||
![]() |
ed7a3b2356 | ||
![]() |
adffbd1973 | ||
![]() |
6750682e4f | ||
![]() |
d1fb4067e7 | ||
![]() |
ab9fe173a8 | ||
![]() |
8a7ae368d8 | ||
![]() |
248fc5f015 | ||
![]() |
e16611f15a | ||
![]() |
24b3651d2e | ||
![]() |
0e236e53fd | ||
![]() |
61ba9848e5 | ||
![]() |
01ff89315b | ||
![]() |
155d8d4dfd | ||
![]() |
c7822ed6e6 | ||
![]() |
1ed40b3b82 | ||
![]() |
18dc359cfc | ||
![]() |
b871849df4 | ||
![]() |
7d1ca3862d | ||
![]() |
2a887870ff | ||
![]() |
1d5bde01cc | ||
![]() |
8537939d26 | ||
![]() |
fcb3ac3917 | ||
![]() |
d63a4b3912 | ||
![]() |
6a245a202c | ||
![]() |
429043f60c | ||
![]() |
8f491fe6e1 | ||
![]() |
92358bafc0 | ||
![]() |
d93e89899b | ||
![]() |
2bdd073608 | ||
![]() |
7801937f74 | ||
![]() |
1d7e0c47dd | ||
![]() |
7b9b48e62d | ||
![]() |
d610f94e7d | ||
![]() |
7742f2f5fb | ||
![]() |
3f7e22497d | ||
![]() |
e745760520 | ||
![]() |
bd5c2b1daa | ||
![]() |
0bbeede975 | ||
![]() |
30865c9d1c | ||
![]() |
1cfe874893 | ||
![]() |
5da46ca29c | ||
![]() |
cc21cbd1bd | ||
![]() |
79ba4a9d23 | ||
![]() |
44ef0cbe59 | ||
![]() |
88f2b39576 | ||
![]() |
bf10cdef63 | ||
![]() |
3696db89ab | ||
![]() |
eeaa5d21d8 | ||
![]() |
f4bec3bb4d | ||
![]() |
dc6f3cf0b2 | ||
![]() |
b5a1310add | ||
![]() |
b22343f65c | ||
![]() |
b9b317e213 | ||
![]() |
6cd18bbf04 | ||
![]() |
8c6400e4d6 | ||
![]() |
b6e1559a80 | ||
![]() |
605ae75dda | ||
![]() |
39f318fbd5 | ||
![]() |
21def81439 | ||
![]() |
7e9ab8321a | ||
![]() |
4094d3a9bf | ||
![]() |
0877fec638 | ||
![]() |
f40c3e6b81 | ||
![]() |
bb705ae863 | ||
![]() |
c4a3a53be0 | ||
![]() |
713ff17e91 | ||
![]() |
0c659a477d | ||
![]() |
26678a65f8 | ||
![]() |
c5116fb318 | ||
![]() |
fa970fa102 | ||
![]() |
fbafd23177 | ||
![]() |
be742c78e7 | ||
![]() |
3e97c154a0 | ||
![]() |
1eb97ea381 | ||
![]() |
f6e06456b0 | ||
![]() |
5e90cd9714 | ||
![]() |
06406fa46c | ||
![]() |
08b7034d6f | ||
![]() |
5b91aef4ec | ||
![]() |
d8a121cd06 | ||
![]() |
902a1b1c88 | ||
![]() |
c9a2b86c16 | ||
![]() |
27fcb70774 | ||
![]() |
e488841031 | ||
![]() |
6cffa1c0ca | ||
![]() |
d3e8fcea47 | ||
![]() |
9526424a47 | ||
![]() |
149278abd0 | ||
![]() |
314c8b22db | ||
![]() |
c502688ce3 | ||
![]() |
866346b059 | ||
![]() |
de8286829a | ||
![]() |
18c407bfc2 | ||
![]() |
076684176b | ||
![]() |
a2cb1615b3 | ||
![]() |
6b5fa140b9 | ||
![]() |
356d0521e6 | ||
![]() |
5a08ab93d3 | ||
![]() |
8185f4dfda | ||
![]() |
f4f0e2afa8 | ||
![]() |
67d6b1e724 | ||
![]() |
e34a9d452a | ||
![]() |
2819a36469 | ||
![]() |
8815b4e0c3 | ||
![]() |
960635b993 | ||
![]() |
f9dcb29e92 | ||
![]() |
5a746769d0 | ||
![]() |
87b4500467 | ||
![]() |
951e47469a | ||
![]() |
c1db963c76 | ||
![]() |
a252ec36f0 | ||
![]() |
33b478199a | ||
![]() |
21f285e099 | ||
![]() |
fec312ed09 | ||
![]() |
eb2627721e | ||
![]() |
ff32f822ba | ||
![]() |
80bfccd9d3 | ||
![]() |
eb5a24064a | ||
![]() |
b971ccc673 | ||
![]() |
945687bdad | ||
![]() |
6f56d0a944 | ||
![]() |
66c48ba4ec | ||
![]() |
f83ade8d33 | ||
![]() |
eae0d1b2a6 | ||
![]() |
743f92d15e | ||
![]() |
822dec5c03 | ||
![]() |
40b84755de | ||
![]() |
1b307166ee | ||
![]() |
7a483e7912 | ||
![]() |
841230fe80 | ||
![]() |
900ae4e1ea | ||
![]() |
72b6d6c676 | ||
![]() |
f979ed133f | ||
![]() |
fb2cf5f04d | ||
![]() |
62401b24ec | ||
![]() |
43f833e604 | ||
![]() |
612a3f4401 | ||
![]() |
4c2cf99fd2 | ||
![]() |
aebd9b410d | ||
![]() |
8eee9caa01 | ||
![]() |
d278a30d19 | ||
![]() |
1117569148 | ||
![]() |
753396ac0c | ||
![]() |
9a267ffcaf | ||
![]() |
6254143fc6 | ||
![]() |
862e19a263 | ||
![]() |
eeef6fe65f | ||
![]() |
c05916477c | ||
![]() |
63d7e73cff | ||
![]() |
8cb629fb55 | ||
![]() |
d596f28f46 | ||
![]() |
8f26c0aeae | ||
![]() |
e4978d8a05 | ||
![]() |
4f43d977ed | ||
![]() |
d0c39e6bf4 | ||
![]() |
0965d98dbd | ||
![]() |
45fea827af | ||
![]() |
350ee13d66 | ||
![]() |
bb7af2e8ed | ||
![]() |
9cda561091 | ||
![]() |
d38846f126 | ||
![]() |
3b764439af | ||
![]() |
d1e404f093 | ||
![]() |
6c4440c2d1 | ||
![]() |
8e2a7c2b2d | ||
![]() |
fce30baa12 | ||
![]() |
c63215c992 | ||
![]() |
815fe02c83 | ||
![]() |
e2e1a5b38c | ||
![]() |
4222c31b3e | ||
![]() |
e5d3b0e7b8 | ||
![]() |
422064e092 | ||
![]() |
d548c6bdfa | ||
![]() |
efb7b147af | ||
![]() |
0f8b220f59 | ||
![]() |
1bba253156 | ||
![]() |
a2d29a4531 | ||
![]() |
589869c5ed | ||
![]() |
7b41922c2d | ||
![]() |
7262fbfb4e | ||
![]() |
9c0e8ee833 | ||
![]() |
9119d72b9b | ||
![]() |
891aac4713 | ||
![]() |
2fa006d74e | ||
![]() |
a6c97a304f | ||
![]() |
cfa4f8fa63 | ||
![]() |
ce0cf09b10 | ||
![]() |
1567fb745d | ||
![]() |
416f563261 | ||
![]() |
25d6eea906 | ||
![]() |
4e819ac035 | ||
![]() |
dbcaa6bf85 | ||
![]() |
3205187090 | ||
![]() |
ed85981409 | ||
![]() |
5d920ff7df | ||
![]() |
5456f0e106 | ||
![]() |
8962c16670 | ||
![]() |
2ed1572c3c | ||
![]() |
89cb673502 | ||
![]() |
98c9c8dd43 | ||
![]() |
fd46f141ea | ||
![]() |
41aec089bc | ||
![]() |
31697022fd | ||
![]() |
948a1c3d03 | ||
![]() |
20371895c9 | ||
![]() |
617027eb52 | ||
![]() |
6b9106b178 | ||
![]() |
6207ccd559 | ||
![]() |
043ac5a510 | ||
![]() |
4eaeade618 | ||
![]() |
f318f76994 | ||
![]() |
fd085d2d37 | ||
![]() |
327f580218 | ||
![]() |
7bef78e10f | ||
![]() |
b54916b1dc | ||
![]() |
c2ab102c0e | ||
![]() |
3f3e73455b | ||
![]() |
4f09a5111b | ||
![]() |
baf820bcdb | ||
![]() |
538f69235f | ||
![]() |
2b9f1257be | ||
![]() |
6e58e8d50f | ||
![]() |
90420f1a89 | ||
![]() |
021b822c4f | ||
![]() |
4ac54845fc | ||
![]() |
4c9d3ee3a6 | ||
![]() |
e688671972 | ||
![]() |
6bd63ff42a | ||
![]() |
efc5211451 | ||
![]() |
f2bf5b12bd | ||
![]() |
dccda1af92 | ||
![]() |
4ca0e6b781 | ||
![]() |
4016e7c217 | ||
![]() |
907a26a8b9 | ||
![]() |
dc23bfeb87 | ||
![]() |
4bfd4bd714 | ||
![]() |
9ddb984004 | ||
![]() |
e618c0e9ef | ||
![]() |
b55d2b53df | ||
![]() |
5733ec8363 | ||
![]() |
d7d75a1fe8 | ||
![]() |
ef6ccb330b | ||
![]() |
afbacdfc96 | ||
![]() |
5842441062 | ||
![]() |
f083301b4c | ||
![]() |
8edbecce3c | ||
![]() |
b114c52c0d | ||
![]() |
4671f47222 | ||
![]() |
4be0e1ee7f | ||
![]() |
dd4c213805 | ||
![]() |
7e57a8f71e | ||
![]() |
86d3d35619 | ||
![]() |
f195111354 | ||
![]() |
ec6fd48b86 | ||
![]() |
1516d55a88 | ||
![]() |
bdb0dc32a7 | ||
![]() |
5ff2f792e6 | ||
![]() |
8246505289 | ||
![]() |
526a5d3fb1 | ||
![]() |
f970815645 | ||
![]() |
f83c77e7ea | ||
![]() |
654cce92cd | ||
![]() |
8f0bfbc553 | ||
![]() |
bc39e52f56 | ||
![]() |
a822238735 | ||
![]() |
b168772d7f | ||
![]() |
034e104c06 | ||
![]() |
0c0f117bc3 | ||
![]() |
1d4ab8833b | ||
![]() |
90fee9788d | ||
![]() |
bf7597e1b2 | ||
![]() |
54ff940c2b | ||
![]() |
446641c31c | ||
![]() |
572d55752c | ||
![]() |
04394d8ced | ||
![]() |
c7ea66bfef | ||
![]() |
bfc5e2dce6 | ||
![]() |
a7e8293d1a | ||
![]() |
698f0cc230 | ||
![]() |
f414f0746c | ||
![]() |
235b8f359c | ||
![]() |
2326cf3de8 | ||
![]() |
b8879853d5 | ||
![]() |
498fd4bf01 | ||
![]() |
996f326c74 | ||
![]() |
43b68faf73 | ||
![]() |
8429ad7db7 | ||
![]() |
879b522914 | ||
![]() |
be584aa3d1 | ||
![]() |
b82f57e7a2 | ||
![]() |
998ec3eb4f | ||
![]() |
f6e3a74567 | ||
![]() |
6d8f55cf75 | ||
![]() |
c4d8466195 | ||
![]() |
36ff506dfe | ||
![]() |
af1fb7aaa6 | ||
![]() |
1a944735df | ||
![]() |
1c8fba4286 | ||
![]() |
0260b30335 | ||
![]() |
0f72085c2a | ||
![]() |
6ad8a10f37 | ||
![]() |
8a38983dfc | ||
![]() |
9f5600b7f7 | ||
![]() |
a7632889a2 | ||
![]() |
58a4bf479c | ||
![]() |
1ce4f99b80 | ||
![]() |
f94ecb3ec5 | ||
![]() |
ce7c22fd57 | ||
![]() |
a5c7e59601 | ||
![]() |
efbf70726f | ||
![]() |
490b1dc01b | ||
![]() |
ec81965393 | ||
![]() |
21536b8948 | ||
![]() |
100c2c8741 | ||
![]() |
d47e4a3e0e | ||
![]() |
0c9ea8f580 | ||
![]() |
2320eb8723 | ||
![]() |
17bbeefeca | ||
![]() |
31372e269d | ||
![]() |
ac799a60da | ||
![]() |
6a1d716ba1 | ||
![]() |
ed12cc1186 | ||
![]() |
8370ac8426 | ||
![]() |
a20ff59572 | ||
![]() |
8587f01caa | ||
![]() |
c7078be407 | ||
![]() |
718a00974b | ||
![]() |
d612676a80 | ||
![]() |
d7bcfb415b | ||
![]() |
6928a17e61 | ||
![]() |
d097092e8e | ||
![]() |
79eb6573be | ||
![]() |
ffed8c9181 | ||
![]() |
ff8573635d | ||
![]() |
643244ffd1 | ||
![]() |
9fd059aef8 | ||
![]() |
6764a7ad2f | ||
![]() |
5642d0aae6 | ||
![]() |
763e5f4ac0 | ||
![]() |
87abdb92e9 | ||
![]() |
447ab47d59 | ||
![]() |
9ed88f25f0 | ||
![]() |
73814550e5 | ||
![]() |
226f7dea65 | ||
![]() |
ed3de771e8 | ||
![]() |
edf3cf3b65 | ||
![]() |
7a54615156 | ||
![]() |
1e1f967292 | ||
![]() |
6f5d4fdc51 | ||
![]() |
e7a6be035e | ||
![]() |
ee762f349c | ||
![]() |
4d98b05e6c | ||
![]() |
4f4591658d | ||
![]() |
0c70e95232 | ||
![]() |
594e8b8c20 | ||
![]() |
4c358b9f08 | ||
![]() |
7b1594c69c | ||
![]() |
6d27feba97 | ||
![]() |
4363dcbbc1 | ||
![]() |
a208f47b6a | ||
![]() |
84268cd43c | ||
![]() |
380c32dee2 | ||
![]() |
bfc7e7c33f | ||
![]() |
8594e4ce4a | ||
![]() |
752dabe554 | ||
![]() |
042db64a00 | ||
![]() |
f55fd8d861 | ||
![]() |
cc41572d48 | ||
![]() |
42de962cbf | ||
![]() |
f527b8225f | ||
![]() |
dd9ccfa3d2 | ||
![]() |
24b6780c1f | ||
![]() |
941571f71f | ||
![]() |
65be9f438b | ||
![]() |
15d13ac9f6 | ||
![]() |
6df3acaf1e | ||
![]() |
b6ee24dcd5 | ||
![]() |
1a3e5b7893 | ||
![]() |
5df8e10b95 | ||
![]() |
bddf2d6537 | ||
![]() |
3d05bdcb63 | ||
![]() |
52e28eefce | ||
![]() |
b5b0626251 | ||
![]() |
4a4f311cf8 | ||
![]() |
bad4d14cf6 | ||
![]() |
b814a633c6 | ||
![]() |
73524d70dc | ||
![]() |
9fad83e46c | ||
![]() |
9fbf4771f2 | ||
![]() |
41cd7da5bd | ||
![]() |
3d975fd767 | ||
![]() |
f8f857c820 | ||
![]() |
9546ed0bb6 | ||
![]() |
a3f957427f | ||
![]() |
9b36c55422 | ||
![]() |
b6286372fb | ||
![]() |
74e71a1971 | ||
![]() |
9050f1a039 | ||
![]() |
de303bf453 | ||
![]() |
3d8d3a9237 | ||
![]() |
cf354d59fb | ||
![]() |
706852d9a7 | ||
![]() |
63e405c27e | ||
![]() |
33e8c6fb73 | ||
![]() |
faba28dd94 | ||
![]() |
16f36824e6 | ||
![]() |
5c1ec0b141 | ||
![]() |
e24d1016a5 | ||
![]() |
a82ee338ef | ||
![]() |
60405bf222 | ||
![]() |
1da6c8c84e | ||
![]() |
32329c1817 | ||
![]() |
4aee7a6c61 | ||
![]() |
e6c2937c1b | ||
![]() |
1bbf9b62ad | ||
![]() |
fa9e024b42 | ||
![]() |
8c52038671 | ||
![]() |
7aff97b6ff | ||
![]() |
77530f13ee | ||
![]() |
0a1c22530a | ||
![]() |
e6c0fe0370 | ||
![]() |
63ba0a5551 | ||
![]() |
c7bfe0d537 | ||
![]() |
05887c976a | ||
![]() |
ccc8fda24f | ||
![]() |
abb458bdd3 | ||
![]() |
41429dd254 | ||
![]() |
268a39e93b | ||
![]() |
8411c60d4a | ||
![]() |
de91f7ec15 | ||
![]() |
6094af819b | ||
![]() |
5f21563d7d | ||
![]() |
5b9afe70b2 | ||
![]() |
27f47460e3 | ||
![]() |
ad1bd045f7 | ||
![]() |
9a9a8bfdc7 | ||
![]() |
c88152cac2 | ||
![]() |
61c72ac3ea | ||
![]() |
84c39f3baa | ||
![]() |
7d6ef195fd | ||
![]() |
52ac972332 | ||
![]() |
9aa599f9d2 | ||
![]() |
a5ce7e5a1f | ||
![]() |
f8511bf199 | ||
![]() |
cfdc6bac7b | ||
![]() |
33e4c088b9 | ||
![]() |
a620bf2174 | ||
![]() |
e77d63294e | ||
![]() |
82c55ba038 | ||
![]() |
e1394d7a7d | ||
![]() |
9f99fa8edc | ||
![]() |
02e67a901f | ||
![]() |
f331f5e92c | ||
![]() |
6e5bf5fef6 | ||
![]() |
c7b4e28f82 | ||
![]() |
9235ecfe7b | ||
![]() |
7f234721ec | ||
![]() |
242b3edbc4 | ||
![]() |
b7894faa96 | ||
![]() |
f3a4b9c709 | ||
![]() |
ea638dd0e0 | ||
![]() |
f0b53c4cbb | ||
![]() |
3eae349a0a | ||
![]() |
ff5004cb8d | ||
![]() |
567855e2d2 | ||
![]() |
d20da5d803 | ||
![]() |
b98a9d2e80 | ||
![]() |
955164aa3c | ||
![]() |
db8f9988eb | ||
![]() |
9b2cae8e33 | ||
![]() |
ac325c5c5e | ||
![]() |
e92f3ea100 | ||
![]() |
75d8338f2d | ||
![]() |
a1b5dad1c8 | ||
![]() |
0e2f7fb122 | ||
![]() |
ee631730c7 | ||
![]() |
f7fd445c73 | ||
![]() |
5c0fc38272 | ||
![]() |
6df0321962 | ||
![]() |
65d19d350c | ||
![]() |
bc93796727 | ||
![]() |
6f0531c663 | ||
![]() |
545acebbaf | ||
![]() |
af49404320 | ||
![]() |
717802860d | ||
![]() |
edd3f35790 | ||
![]() |
20806a8c97 | ||
![]() |
f071cc5c04 | ||
![]() |
90c9078bf5 | ||
![]() |
203e70afbc | ||
![]() |
2599e54fd0 | ||
![]() |
2737425242 | ||
![]() |
40569eee2e | ||
![]() |
883c2851ff | ||
![]() |
4fe4d0a7e7 | ||
![]() |
a46d8fe7f3 | ||
![]() |
dd70d27a0e | ||
![]() |
b46ee0c495 | ||
![]() |
28ffb3fd18 | ||
![]() |
d1456850d1 | ||
![]() |
050e2febab | ||
![]() |
f56c8ef755 | ||
![]() |
1a9529157f | ||
![]() |
8c5c3cb7a6 | ||
![]() |
8ab9b06185 | ||
![]() |
91c5556f21 | ||
![]() |
95cf47d9a4 | ||
![]() |
dfad2370aa | ||
![]() |
331e25cc41 | ||
![]() |
6dcf7e8088 | ||
![]() |
99690a6145 | ||
![]() |
27bcc3c685 | ||
![]() |
bb60558968 | ||
![]() |
af38a3927a | ||
![]() |
bf6771152e | ||
![]() |
48b6c81b36 | ||
![]() |
d95da7372a | ||
![]() |
231037b0fe | ||
![]() |
c481d20ce8 | ||
![]() |
dc32556983 | ||
![]() |
d7b43f4722 | ||
![]() |
ca29fcec7d | ||
![]() |
bd637d661a | ||
![]() |
ef8174af70 | ||
![]() |
dee562437b | ||
![]() |
a1a4a73260 | ||
![]() |
627edeb0f2 | ||
![]() |
16f29b65f2 | ||
![]() |
6d2a863af5 | ||
![]() |
35e61a0c69 | ||
![]() |
c474699695 | ||
![]() |
1e2dadf75e | ||
![]() |
002303a765 | ||
![]() |
59426c31f7 | ||
![]() |
fd895d0ef5 | ||
![]() |
8220272953 | ||
![]() |
c8fc1ebefa | ||
![]() |
ac9531eb98 | ||
![]() |
56d68ce161 | ||
![]() |
a3ef3b34e1 | ||
![]() |
30e348f965 | ||
![]() |
b981ddadca | ||
![]() |
bcd721e2ef | ||
![]() |
a96ff8399f | ||
![]() |
2f4dec45a6 | ||
![]() |
b216e42397 | ||
![]() |
9b6c624554 | ||
![]() |
12154613c2 | ||
![]() |
a42dee5a44 | ||
![]() |
d4d86172f0 | ||
![]() |
a49f454826 | ||
![]() |
cd9a965057 | ||
![]() |
f5bc26e9fe | ||
![]() |
c55d69d587 | ||
![]() |
e508357cfb | ||
![]() |
1c578b2b3f | ||
![]() |
4ce2ef732d | ||
![]() |
65705aba10 | ||
![]() |
e97873fb7e | ||
![]() |
13d1df3cef | ||
![]() |
7ffb7db888 | ||
![]() |
74366379ba | ||
![]() |
6b89d05a3c | ||
![]() |
b0b9a4d33c | ||
![]() |
fc0fe0557b | ||
![]() |
116347ef66 | ||
![]() |
58516913b4 | ||
![]() |
fb836fcf6f | ||
![]() |
1ad44ace0a | ||
![]() |
32ddaed376 | ||
![]() |
4e388e5dc2 | ||
![]() |
44c467f472 | ||
![]() |
f90d0d2dae | ||
![]() |
b47920169f | ||
![]() |
81bf349871 | ||
![]() |
9d849a16ec | ||
![]() |
383749a9c0 | ||
![]() |
5e2feb5803 | ||
![]() |
93645681b0 | ||
![]() |
174e8d3c19 | ||
![]() |
511a5c0d1f | ||
![]() |
c92c840c41 | ||
![]() |
952a1a1da8 | ||
![]() |
e89cb4cc0e | ||
![]() |
89cc70091c | ||
![]() |
cfaa2c542d | ||
![]() |
c6bbe9e020 | ||
![]() |
15d2fd4581 | ||
![]() |
819c2b0bc3 | ||
![]() |
30984c62fa | ||
![]() |
bd649cfb33 | ||
![]() |
5c57cd6541 | ||
![]() |
02e2080ce2 | ||
![]() |
c00601743c | ||
![]() |
523f8dae2a | ||
![]() |
4e56fe9fc3 | ||
![]() |
2829146598 | ||
![]() |
b5ed158cc6 | ||
![]() |
7314f20563 | ||
![]() |
0559eb1a9a | ||
![]() |
40f732cd48 | ||
![]() |
eb837e3685 | ||
![]() |
8ddfbf37c3 | ||
![]() |
0729ea1c38 | ||
![]() |
39541fe348 | ||
![]() |
259c5199d6 | ||
![]() |
af0b563036 | ||
![]() |
c2b879ccab | ||
![]() |
8447c6d79f | ||
![]() |
46fb61c68c | ||
![]() |
860ef650f9 | ||
![]() |
ef6f9b9d70 | ||
![]() |
9f701c8f35 | ||
![]() |
e18d3d0c93 | ||
![]() |
dd46a64e79 | ||
![]() |
31a7effa5f | ||
![]() |
42f3f405ea | ||
![]() |
1b612f7112 | ||
![]() |
6e364ce4b9 | ||
![]() |
89eb44f3ef | ||
![]() |
53364d6ddc | ||
![]() |
4631f2223b | ||
![]() |
20fb9ab97d | ||
![]() |
dcbac42571 | ||
![]() |
0258d926a2 | ||
![]() |
e79e60f0bb | ||
![]() |
ef0c0b1e32 | ||
![]() |
14631c65a6 | ||
![]() |
f706f7508e | ||
![]() |
6662b4f047 | ||
![]() |
13c453d610 | ||
![]() |
e8026ef074 | ||
![]() |
cefc6f8eac | ||
![]() |
bf809ac118 | ||
![]() |
88ba0c1154 | ||
![]() |
43d2d5334a | ||
![]() |
de41efbaf8 | ||
![]() |
81419ca68b | ||
![]() |
276cbc0ace | ||
![]() |
6ada5110a4 | ||
![]() |
f6ee45249d | ||
![]() |
be36a20249 | ||
![]() |
5173597868 | ||
![]() |
86657529c9 | ||
![]() |
6460b7b0a1 | ||
![]() |
3b91be8449 | ||
![]() |
055aea58bb | ||
![]() |
1914c69c47 | ||
![]() |
fdb06ad973 | ||
![]() |
37979b4f9c | ||
![]() |
0a03c30e8a | ||
![]() |
e8dbf1511b | ||
![]() |
f163abb4a5 | ||
![]() |
fa99f3dbbe | ||
![]() |
76e21a6378 | ||
![]() |
0d3c403f08 | ||
![]() |
7c6ff85acd | ||
![]() |
4d60c61512 | ||
![]() |
31376ee2fd | ||
![]() |
b067bbd935 | ||
![]() |
f980170a82 | ||
![]() |
8b7ef14f65 | ||
![]() |
5eb8549aef | ||
![]() |
728b76aa7e | ||
![]() |
c20ed92fab | ||
![]() |
13f6a8da53 | ||
![]() |
28426f3ee3 | ||
![]() |
fb925745ba | ||
![]() |
271cb66c69 | ||
![]() |
b3f2812191 | ||
![]() |
1824363f86 | ||
![]() |
439d2bcf7f | ||
![]() |
447f5fd5c8 | ||
![]() |
ce78e330d3 | ||
![]() |
7a47d58ad4 | ||
![]() |
e9f467ed77 | ||
![]() |
aedfb1d8da | ||
![]() |
0ab424c0d2 | ||
![]() |
674b1efa0f | ||
![]() |
a7509988af | ||
![]() |
f58f738fe4 | ||
![]() |
ba9228b142 | ||
![]() |
0beef302c6 | ||
![]() |
da538f6424 | ||
![]() |
8384c2cba3 | ||
![]() |
fbf430fac9 | ||
![]() |
29516304b0 | ||
![]() |
c853de4315 | ||
![]() |
e2d17fe20f | ||
![]() |
9e5be7ffcf | ||
![]() |
44dc3035a5 | ||
![]() |
468666cdc8 | ||
![]() |
00972f5d73 | ||
![]() |
bffe6e7a32 | ||
![]() |
cc3bb912be | ||
![]() |
9a64b6080d | ||
![]() |
42156edc76 | ||
![]() |
7ae98477d5 | ||
![]() |
626b6f5ccd | ||
![]() |
9c82de7df2 | ||
![]() |
e7de562b93 | ||
![]() |
06d679d1ca | ||
![]() |
33cf30a07b | ||
![]() |
6ac1960c47 | ||
![]() |
1350096545 | ||
![]() |
18b9df62e4 | ||
![]() |
42495a9914 | ||
![]() |
b2f25d7f0a | ||
![]() |
30b71328b9 | ||
![]() |
7c97e37dc3 | ||
![]() |
f42d088fbc | ||
![]() |
a5e63de016 | ||
![]() |
f7d3718457 | ||
![]() |
3fa0f1b9e8 | ||
![]() |
f1a54bd7d0 | ||
![]() |
1803e47c02 | ||
![]() |
296e44deec | ||
![]() |
c546065611 | ||
![]() |
7e759e6ae8 | ||
![]() |
a9608c096e | ||
![]() |
4db723fa6f | ||
![]() |
64e8efbc07 | ||
![]() |
15f4bc5bd6 | ||
![]() |
ce6668ef8c | ||
![]() |
f60ab3e01f | ||
![]() |
78a7aa91c0 | ||
![]() |
a574f3d173 | ||
![]() |
c9223686d2 | ||
![]() |
c86afe29ba | ||
![]() |
0c56a7a23b | ||
![]() |
b66676de85 | ||
![]() |
c7a468eb14 | ||
![]() |
1c5b5e3621 | ||
![]() |
d78c757b52 | ||
![]() |
9871a4e36f | ||
![]() |
3e82595193 | ||
![]() |
044456cd98 | ||
![]() |
c0ba724578 | ||
![]() |
79daa12b06 | ||
![]() |
1f1e6006ec | ||
![]() |
8ee8acf655 | ||
![]() |
5e928769ec | ||
![]() |
a0b60a8347 | ||
![]() |
dbe2a7b240 | ||
![]() |
e8298726ca | ||
![]() |
4c675b8311 | ||
![]() |
1ab9bc40a5 | ||
![]() |
859c81c81b | ||
![]() |
83ed87cec0 | ||
![]() |
9b6feb4a01 | ||
![]() |
40841b9b07 | ||
![]() |
d1227b6447 | ||
![]() |
fc417e7273 | ||
![]() |
bda238344c | ||
![]() |
45add916f4 | ||
![]() |
ca549cf081 | ||
![]() |
fb3c5eaafa | ||
![]() |
8e4873cf2a | ||
![]() |
47316741c1 | ||
![]() |
86dd57d13e | ||
![]() |
4fe6ae9811 | ||
![]() |
edc1a71d0d | ||
![]() |
ed74403850 | ||
![]() |
9454c05b7e | ||
![]() |
cdd7f4b6bb | ||
![]() |
e4819c1753 | ||
![]() |
e8e0a19758 | ||
![]() |
a8a271e6fb | ||
![]() |
fc6aa7cc1c | ||
![]() |
b0140a655f | ||
![]() |
ef7ee02f65 | ||
![]() |
3d1d5c1151 | ||
![]() |
e0c56c6342 | ||
![]() |
2ceada84a6 | ||
![]() |
5b50a113d1 | ||
![]() |
fe52cc7797 | ||
![]() |
3923f2baf4 | ||
![]() |
0157ade1c9 | ||
![]() |
a863c743c4 | ||
![]() |
62d3b6a3f2 | ||
![]() |
e435274329 | ||
![]() |
55380298f1 | ||
![]() |
71e65119ae | ||
![]() |
dc788043ff | ||
![]() |
a98c465c9a | ||
![]() |
dace0671c2 | ||
![]() |
0882f9dd3c | ||
![]() |
c734e13753 | ||
![]() |
b54483db51 | ||
![]() |
5d0bf2e80b | ||
![]() |
46e694a5c8 | ||
![]() |
e0756a56a4 | ||
![]() |
55e2a2fd5d | ||
![]() |
eecc257822 | ||
![]() |
36f2998c91 | ||
![]() |
131926cad8 | ||
![]() |
d588327c1c | ||
![]() |
68c7f502b4 | ||
![]() |
06fa669f8c | ||
![]() |
d3885eefda | ||
![]() |
74e551cf1a | ||
![]() |
a1333d8d6b | ||
![]() |
3dfb40b57e | ||
![]() |
316cd7710f | ||
![]() |
cbefc5b81e | ||
![]() |
233e161327 | ||
![]() |
7cbbc33319 | ||
![]() |
61cc0a1864 | ||
![]() |
99f01475a0 | ||
![]() |
481e029600 | ||
![]() |
5ff22467a6 | ||
![]() |
3fd44a1813 | ||
![]() |
224a354480 | ||
![]() |
18cdf5d89d | ||
![]() |
293afa6059 | ||
![]() |
06fbcde815 | ||
![]() |
4195a72621 | ||
![]() |
d3bf001752 | ||
![]() |
d4604cba34 | ||
![]() |
221b4a9391 | ||
![]() |
5f5d4d7987 | ||
![]() |
8a17fe927f | ||
![]() |
090d1e900e | ||
![]() |
d0f9d4de02 | ||
![]() |
6bc1ac1156 | ||
![]() |
c77e403f27 | ||
![]() |
5178a288ca | ||
![]() |
7c6702057d | ||
![]() |
c1aad3882c | ||
![]() |
8cf30893f2 | ||
![]() |
da9af181dd | ||
![]() |
92e3c028bb | ||
![]() |
cf7d194dbf | ||
![]() |
eae6a34bfc | ||
![]() |
d9f851f0c1 | ||
![]() |
9590dce8a4 | ||
![]() |
5af77449fa | ||
![]() |
2c39ab01ea | ||
![]() |
035d70cf57 | ||
![]() |
8c76e258d7 | ||
![]() |
f05c7ffbfa | ||
![]() |
a49fcb0176 | ||
![]() |
87503b875a | ||
![]() |
d89d770e73 | ||
![]() |
083d64d151 | ||
![]() |
281f379ced | ||
![]() |
9dab48d135 | ||
![]() |
56221ebc95 | ||
![]() |
0b4b1cb3f3 | ||
![]() |
30e417a614 | ||
![]() |
538de08c07 | ||
![]() |
f540645d80 | ||
![]() |
a58743b820 | ||
![]() |
0ed1499d02 | ||
![]() |
5158a710dc | ||
![]() |
413b6cc43c | ||
![]() |
82a10f2558 | ||
![]() |
b97acb3174 | ||
![]() |
451976c3c8 | ||
![]() |
8e6494c451 | ||
![]() |
3579f9e0c4 | ||
![]() |
39d442a05d | ||
![]() |
0045206735 | ||
![]() |
7f46767411 | ||
![]() |
5414691f1f | ||
![]() |
13731fb094 | ||
![]() |
40ab9d9366 | ||
![]() |
93d6129732 | ||
![]() |
776e214789 | ||
![]() |
d8f6621c7d | ||
![]() |
aca2b4423e | ||
![]() |
204e3f8e3e | ||
![]() |
f54443d7f8 | ||
![]() |
3d82ad9222 | ||
![]() |
393c66385f | ||
![]() |
b008195abd | ||
![]() |
ad2c11413e | ||
![]() |
1e2853c950 | ||
![]() |
171e5d3766 | ||
![]() |
46535cc7a8 | ||
![]() |
16b40a731a | ||
![]() |
9b4b9334a3 | ||
![]() |
96d40d9816 | ||
![]() |
407b003cd6 | ||
![]() |
8129d5e6af | ||
![]() |
c12454a8e9 | ||
![]() |
0f49d412b5 | ||
![]() |
b33551e62e | ||
![]() |
8bb4d149b6 | ||
![]() |
854fda4530 | ||
![]() |
0deea7133d | ||
![]() |
1eaa8afc5f | ||
![]() |
1fc364fa07 | ||
![]() |
f275ba36a1 | ||
![]() |
1aa741a25b | ||
![]() |
6b2f619096 | ||
![]() |
f74ddc1010 | ||
![]() |
6a240919bf | ||
![]() |
2518867d50 | ||
![]() |
046b95d5b1 | ||
![]() |
12dab27729 | ||
![]() |
29964aa90c | ||
![]() |
f0f7b86c7d | ||
![]() |
2d437975a8 | ||
![]() |
a9502a11d6 | ||
![]() |
6aa502b1a5 | ||
![]() |
ea40d212c2 | ||
![]() |
befed2200e | ||
![]() |
7afe1807b7 | ||
![]() |
91478aa128 | ||
![]() |
424b36ffca | ||
![]() |
52f7dc71e6 | ||
![]() |
402e6e7433 | ||
![]() |
6592906d0d | ||
![]() |
678f4102a5 | ||
![]() |
c70e3bb3dd | ||
![]() |
b089355b5a | ||
![]() |
00cf1badfc | ||
![]() |
07159ea5df | ||
![]() |
52f3b8b507 | ||
![]() |
d1387e50e2 | ||
![]() |
65112b6008 | ||
![]() |
4e4dc30a91 | ||
![]() |
70292dac00 | ||
![]() |
e46cb2f240 | ||
![]() |
9fee86df59 | ||
![]() |
19b61a32af | ||
![]() |
60091a2b5e | ||
![]() |
68d246a3e9 | ||
![]() |
9749f59b5a | ||
![]() |
15c09a0f03 | ||
![]() |
971212e7e9 | ||
![]() |
807d461289 | ||
![]() |
e033b72942 | ||
![]() |
98b476de71 | ||
![]() |
8fd2af1870 | ||
![]() |
2a1849d027 | ||
![]() |
d04b759913 | ||
![]() |
2772e705ae | ||
![]() |
d44a4e8ae2 | ||
![]() |
53a40fa914 | ||
![]() |
86d1d4898a | ||
![]() |
e4af2d5e86 | ||
![]() |
d124e2ed15 | ||
![]() |
00c9c14efb | ||
![]() |
6fc66b7b13 | ||
![]() |
9658c2d553 | ||
![]() |
577761a6bb | ||
![]() |
6c610c509f | ||
![]() |
1eacc26ef9 | ||
![]() |
5f33516054 | ||
![]() |
0766205cb0 | ||
![]() |
0825626fc9 | ||
![]() |
470352ddc8 | ||
![]() |
5ccff7b406 | ||
![]() |
66c03c6798 | ||
![]() |
d7912c6973 | ||
![]() |
afa20c92e8 | ||
![]() |
95562a8965 | ||
![]() |
a1abbd406d | ||
![]() |
b05d125db2 | ||
![]() |
54efe5a351 | ||
![]() |
8594fa8199 | ||
![]() |
238667d989 | ||
![]() |
87536619e3 | ||
![]() |
84ab076fe8 | ||
![]() |
57f3cee390 | ||
![]() |
714aea2e27 | ||
![]() |
52081f4a2a | ||
![]() |
b88b422e2f | ||
![]() |
d3499e71ef | ||
![]() |
c7d94548be | ||
![]() |
c866932861 | ||
![]() |
317bd2fcec | ||
![]() |
b06ba06f40 | ||
![]() |
dc52a8a08b | ||
![]() |
5f1e7ffd0c | ||
![]() |
21a80650b0 | ||
![]() |
3b7ec3d913 | ||
![]() |
f1c89f0917 | ||
![]() |
8692ea1a74 | ||
![]() |
a5c25ce438 | ||
![]() |
12d494bf38 | ||
![]() |
f0f9b93694 | ||
![]() |
a97f5a8488 | ||
![]() |
a4224555d2 | ||
![]() |
1b0055ac61 | ||
![]() |
7dcde3e585 | ||
![]() |
4fe33268ac | ||
![]() |
b84abea670 | ||
![]() |
a5fcc7d65f | ||
![]() |
313d9a4ef9 | ||
![]() |
658aace8f1 | ||
![]() |
49562d43c3 | ||
![]() |
4241b62760 | ||
![]() |
47fcee4df8 | ||
![]() |
6c2eae29f5 | ||
![]() |
3869cb9bf3 | ||
![]() |
89c871f171 | ||
![]() |
12c95e6fca | ||
![]() |
2affb9b863 | ||
![]() |
710a85429e | ||
![]() |
2efa6f5cb1 | ||
![]() |
3f1a103e82 | ||
![]() |
1ce87ecfa8 | ||
![]() |
e102d256b2 | ||
![]() |
3b6b79229f | ||
![]() |
70c1e57ff0 | ||
![]() |
ad02f384d2 | ||
![]() |
0efa6a5816 | ||
![]() |
6edabeeecc | ||
![]() |
b96559b76f | ||
![]() |
3a9ded9315 | ||
![]() |
ee97eb6d27 | ||
![]() |
16f7dafcdf | ||
![]() |
1cce926ef8 | ||
![]() |
1cf50e5fcd | ||
![]() |
d8654edd2c | ||
![]() |
f3b97dedc8 | ||
![]() |
b45bbff97c | ||
![]() |
23284e1f35 | ||
![]() |
c9c6a4b867 | ||
![]() |
069c59d3d7 | ||
![]() |
4f2352355f | ||
![]() |
984f50b792 | ||
![]() |
426309a8f5 | ||
![]() |
b4b4694eb5 | ||
![]() |
5505a8609d | ||
![]() |
030ad0a8de | ||
![]() |
71d05a6fc2 | ||
![]() |
867cf478b1 | ||
![]() |
b43a05b662 | ||
![]() |
92db3878cf | ||
![]() |
b6d11d6be3 | ||
![]() |
669572b0e6 | ||
![]() |
5887f90202 | ||
![]() |
81d50ed3bb | ||
![]() |
9bdf3ad0c8 | ||
![]() |
0b38d73cc5 | ||
![]() |
7a3dbf626e | ||
![]() |
767ec1795a | ||
![]() |
ecbf230293 | ||
![]() |
839b63e389 | ||
![]() |
73078531aa | ||
![]() |
c5c3f7a667 | ||
![]() |
8738817eab | ||
![]() |
33877c73bb | ||
![]() |
0e3698fa2f | ||
![]() |
65f48800b9 | ||
![]() |
7738e6a381 | ||
![]() |
ca3b01f898 | ||
![]() |
e05c432d14 | ||
![]() |
53a8c2d574 | ||
![]() |
6783fb9010 | ||
![]() |
a7adb517ca | ||
![]() |
b7a66295f6 | ||
![]() |
27f2c44532 | ||
![]() |
efd1efad4d | ||
![]() |
b2ea4c758d | ||
![]() |
cfce057783 | ||
![]() |
5854beebc6 | ||
![]() |
879eb6295d | ||
![]() |
3ae761dd9d | ||
![]() |
f5ab2f63c0 | ||
![]() |
c89170a22d | ||
![]() |
84b36e72e4 | ||
![]() |
9298922601 | ||
![]() |
9913e7198c | ||
![]() |
9bd10d5102 | ||
![]() |
4c62840cef | ||
![]() |
e6f0a86c5a | ||
![]() |
c9da6d2ee2 | ||
![]() |
f33350a4ef | ||
![]() |
debe9959e4 | ||
![]() |
a357b021ba | ||
![]() |
f7099b2bc7 | ||
![]() |
8d3b1d10d2 | ||
![]() |
f143000f4a | ||
![]() |
7954caf957 | ||
![]() |
a07a31a2bb | ||
![]() |
405394707f | ||
![]() |
f13b8d1557 | ||
![]() |
6e1f4fada9 | ||
![]() |
892ef23ca0 | ||
![]() |
8e1c0a7fa4 | ||
![]() |
113e7e0d3d | ||
![]() |
ea619ce99a | ||
![]() |
e9c7d6a0cc | ||
![]() |
2f57e9cbee | ||
![]() |
0de16855b0 | ||
![]() |
4fedace179 | ||
![]() |
8d2da6f30b | ||
![]() |
8f427b727c | ||
![]() |
f1f0332d22 | ||
![]() |
7c0541034a | ||
![]() |
e8895a4d76 | ||
![]() |
a42bcfceb6 | ||
![]() |
fcda2e8706 | ||
![]() |
efe3680cd7 | ||
![]() |
272e30d761 | ||
![]() |
30940fad2e | ||
![]() |
d772280147 | ||
![]() |
7475fa69fa | ||
![]() |
ec8b53263c | ||
![]() |
0f567385a8 | ||
![]() |
dae1197ce4 | ||
![]() |
1296ab9b60 | ||
![]() |
b1575a979e | ||
![]() |
d677eb1698 | ||
![]() |
71ad1d9906 | ||
![]() |
731b79fc6c | ||
![]() |
e001448adb | ||
![]() |
3041064d21 | ||
![]() |
30134dbbc9 | ||
![]() |
4208885e36 | ||
![]() |
656f48b354 | ||
![]() |
68eb9a37ca | ||
![]() |
6c4ce03d59 | ||
![]() |
86e4679e04 | ||
![]() |
71806e6f95 | ||
![]() |
7e8a93e974 | ||
![]() |
b87ad9cd2f | ||
![]() |
10bab2b6e4 | ||
![]() |
5aab10fab2 | ||
![]() |
77aa23a375 | ||
![]() |
b09da2641e | ||
![]() |
26b0a7d75d | ||
![]() |
b6bf38b85c | ||
![]() |
b6424b4596 | ||
![]() |
dc2051fb59 | ||
![]() |
8bdc61ddcc | ||
![]() |
d4a25230ec | ||
![]() |
e548a4b6aa | ||
![]() |
8607fe43a6 | ||
![]() |
718178c1b6 | ||
![]() |
930205b1c9 | ||
![]() |
ec05e6864a | ||
![]() |
e08a702408 | ||
![]() |
d5b31a5545 | ||
![]() |
811c694f80 | ||
![]() |
8775222a61 | ||
![]() |
3aebdae305 | ||
![]() |
d3122020b9 | ||
![]() |
822ea1b450 | ||
![]() |
515057c7d0 | ||
![]() |
54b93febba | ||
![]() |
5d0600a0af | ||
![]() |
1ca2b40bf9 | ||
![]() |
ff85241962 | ||
![]() |
bc227b5f56 | ||
![]() |
283231ac6e | ||
![]() |
fc46bb65f8 | ||
![]() |
c5ca4dd7b3 | ||
![]() |
c493f732f9 | ||
![]() |
a59fb4fd1e | ||
![]() |
f671e699e6 | ||
![]() |
05fb60f809 | ||
![]() |
e564470490 | ||
![]() |
c7a8a3933e | ||
![]() |
e1fdb90da6 | ||
![]() |
df0562d6b4 | ||
![]() |
9afc39d5f1 | ||
![]() |
4a1c4df861 | ||
![]() |
b054ed6e03 | ||
![]() |
8a02a51d0c | ||
![]() |
da64785ea4 | ||
![]() |
8b91abd0dc | ||
![]() |
b6364941a0 | ||
![]() |
4fdd362ed5 | ||
![]() |
7bff5f562c | ||
![]() |
3de13a2e6d | ||
![]() |
329e885da7 | ||
![]() |
d125551c0d | ||
![]() |
250741e431 | ||
![]() |
22d6a32e4e | ||
![]() |
11c66f6ce4 | ||
![]() |
4ffd642732 | ||
![]() |
f7f6b15ef1 | ||
![]() |
efcb1715ee | ||
![]() |
8f5bcdff99 | ||
![]() |
a0b2a8ba74 | ||
![]() |
3013ee083d | ||
![]() |
0ae14f673c | ||
![]() |
b4eb09d5e9 | ||
![]() |
49d1bc08c3 | ||
![]() |
e9e5c41f79 | ||
![]() |
fac3721dfd | ||
![]() |
e687d5db69 | ||
![]() |
4fa48f7940 | ||
![]() |
f7fa770fc5 | ||
![]() |
a311366ee7 | ||
![]() |
9675ef1293 | ||
![]() |
f004000d5c | ||
![]() |
9fb7e1b46d | ||
![]() |
c2f5a12ce1 | ||
![]() |
671fb3476c | ||
![]() |
c28a168868 | ||
![]() |
3057302baa | ||
![]() |
94bc33f61c | ||
![]() |
e874730679 | ||
![]() |
1ae84c11ad | ||
![]() |
9cb2bc4825 | ||
![]() |
2bd69b2681 | ||
![]() |
f3b17d4896 | ||
![]() |
8f5a1ce1b6 | ||
![]() |
2aa0021a08 | ||
![]() |
81ec684025 | ||
![]() |
247b89aec5 | ||
![]() |
8b09a02028 | ||
![]() |
597c5063f3 | ||
![]() |
4843719c58 | ||
![]() |
d7c1c0d04c | ||
![]() |
4fa43fdf60 | ||
![]() |
cafb9ddcf4 | ||
![]() |
3f6b8def05 | ||
![]() |
40f0ea95ce | ||
![]() |
ca3b02229f | ||
![]() |
8039aa3eb1 | ||
![]() |
5a494ca50e | ||
![]() |
ef8e8c5a30 | ||
![]() |
a19629d105 | ||
![]() |
6e598c74f5 | ||
![]() |
35cf4f3009 | ||
![]() |
150d3df9b5 | ||
![]() |
ceff73ee5b | ||
![]() |
595b2e40d9 | ||
![]() |
9d09401dc1 | ||
![]() |
d9ad3ae2c9 | ||
![]() |
26cb600ae3 | ||
![]() |
435210b73c | ||
![]() |
8dc465f244 | ||
![]() |
8e708dc02a | ||
![]() |
b53b6723cc | ||
![]() |
af4b4ae85c | ||
![]() |
0e84bf95da | ||
![]() |
8e93470b94 | ||
![]() |
7cdb1caaf2 | ||
![]() |
bf346a6ed1 | ||
![]() |
c0101933bd | ||
![]() |
24a7c26819 | ||
![]() |
13019941b3 | ||
![]() |
a0451065a2 | ||
![]() |
54b87c19b1 | ||
![]() |
8828f21f2e | ||
![]() |
7fee135bf5 | ||
![]() |
b69e466de0 | ||
![]() |
37fcf2fed7 | ||
![]() |
8f69575b95 | ||
![]() |
2f71c2b784 | ||
![]() |
f3a809a495 | ||
![]() |
ad955b157b | ||
![]() |
a87b6c7022 | ||
![]() |
5104d0d250 | ||
![]() |
add83c4c7a | ||
![]() |
42b9ec819b | ||
![]() |
e93f9eb080 | ||
![]() |
0244db9182 | ||
![]() |
461f9c7ad6 | ||
![]() |
fd8e1ed623 | ||
![]() |
79b89f8a1b | ||
![]() |
6b33ad8a32 | ||
![]() |
40cf4f2c2a | ||
![]() |
d5ef1488b2 | ||
![]() |
3beab611a3 | ||
![]() |
89a67c0749 | ||
![]() |
fd842c43a6 | ||
![]() |
e51788ac05 | ||
![]() |
9b046ecc75 | ||
![]() |
aa68e69a18 | ||
![]() |
2e573e95e5 | ||
![]() |
e95bc31def | ||
![]() |
4eb843fd8c | ||
![]() |
05db08c8d2 | ||
![]() |
513de18af5 | ||
![]() |
151a6ce9e3 | ||
![]() |
d4a1764d20 | ||
![]() |
59c6ee7ade | ||
![]() |
4fbfa1d579 | ||
![]() |
5285699a21 | ||
![]() |
75879de47a | ||
![]() |
e23dc5bda3 | ||
![]() |
8998bf85bd | ||
![]() |
7b1601c840 | ||
![]() |
026b6a4b01 | ||
![]() |
d927e85eca | ||
![]() |
a877a9fd91 | ||
![]() |
abc481604f | ||
![]() |
c703458e00 | ||
![]() |
75861db84b | ||
![]() |
223957084a | ||
![]() |
8c0c0868e3 | ||
![]() |
a65d21d7e4 | ||
![]() |
82e6bc5d31 | ||
![]() |
09f1ad0625 | ||
![]() |
852aaae52e | ||
![]() |
270922b460 | ||
![]() |
b39da7e356 | ||
![]() |
a438803a5a | ||
![]() |
1b8b3cf1ee | ||
![]() |
c2176ba27d | ||
![]() |
a69f898e12 | ||
![]() |
0139d862b5 | ||
![]() |
0459c75dc0 | ||
![]() |
547242b1cb | ||
![]() |
b8d83c7ec4 | ||
![]() |
5c644d1b36 | ||
![]() |
7d3d0f7ceb | ||
![]() |
101d2ea9b6 | ||
![]() |
af7c7da735 | ||
![]() |
c1397bbaf7 | ||
![]() |
16a9f38844 | ||
![]() |
508d2472e9 | ||
![]() |
f162987a4f | ||
![]() |
17be72e55e | ||
![]() |
d61cd96d8f | ||
![]() |
6daf583b4b | ||
![]() |
b23a4c8288 | ||
![]() |
b1240426cd | ||
![]() |
30ab2cf9a7 | ||
![]() |
0c985a5fda | ||
![]() |
049d326836 | ||
![]() |
960bf3f3bf | ||
![]() |
edf959f26d | ||
![]() |
077d9d3918 | ||
![]() |
76c4a6bb7c | ||
![]() |
ee81aea2fa | ||
![]() |
0cf6fb2c9d | ||
![]() |
9d3913ed70 | ||
![]() |
d275dfea2f | ||
![]() |
11d8485399 | ||
![]() |
6b926b12a4 | ||
![]() |
9adec79401 | ||
![]() |
c3b146611b | ||
![]() |
a454f3de6e | ||
![]() |
b29d17d39a | ||
![]() |
ff3fdd4c72 | ||
![]() |
0533f29e7a | ||
![]() |
75adfebf9f | ||
![]() |
e96435378a | ||
![]() |
ccdfe77356 | ||
![]() |
adf39b6f6a | ||
![]() |
ad1d81dccf | ||
![]() |
8828629a30 | ||
![]() |
1ead9b59ba | ||
![]() |
c401cabee7 | ||
![]() |
1549f2aa4d | ||
![]() |
89beb12d59 | ||
![]() |
4083e552fd | ||
![]() |
3d7dea52a0 | ||
![]() |
c5f77a1c38 | ||
![]() |
3561ddf7d3 | ||
![]() |
81e96d25ba | ||
![]() |
1ad966bd80 | ||
![]() |
4f472982da | ||
![]() |
f1396b65b9 | ||
![]() |
ba12489c34 | ||
![]() |
4586d3be78 | ||
![]() |
2204e0cf9c | ||
![]() |
8787774764 | ||
![]() |
feb00d5b8a | ||
![]() |
660fc982ca | ||
![]() |
1189c76691 | ||
![]() |
27a3414a27 | ||
![]() |
4d717a5c19 | ||
![]() |
e7744eace9 | ||
![]() |
3bdb6a237f | ||
![]() |
98dc46e01a | ||
![]() |
a88f56bec3 | ||
![]() |
fbc10b8e32 | ||
![]() |
42fd7b216d | ||
![]() |
511731f0b4 | ||
![]() |
ad23021b29 | ||
![]() |
05a37b3a9c | ||
![]() |
8ca368f9c7 | ||
![]() |
dd924f389f | ||
![]() |
bbb54d6a6a | ||
![]() |
96e9cc9c32 | ||
![]() |
b1f16857fc | ||
![]() |
738fd1a09b | ||
![]() |
645249afa9 | ||
![]() |
a51d794885 | ||
![]() |
9d1f491a89 | ||
![]() |
e8a0d56ff8 | ||
![]() |
7048987157 | ||
![]() |
e03731fd24 | ||
![]() |
7097859b79 | ||
![]() |
e87df8a57a | ||
![]() |
982fb27f58 | ||
![]() |
0d1a5846a1 | ||
![]() |
b9e79f8d98 | ||
![]() |
666ef49924 | ||
![]() |
d2ea8a001f | ||
![]() |
16efba2b91 | ||
![]() |
e93b5c3c1c | ||
![]() |
9dabd1f344 | ||
![]() |
e96dd14e0d | ||
![]() |
3e970df09b | ||
![]() |
b0a8c4d278 | ||
![]() |
b55832df34 | ||
![]() |
4c7013644f | ||
![]() |
4859ffdf54 | ||
![]() |
bca5d8a6c5 | ||
![]() |
af57083afd | ||
![]() |
f10233a0a6 | ||
![]() |
4b9455e84d | ||
![]() |
9305612419 | ||
![]() |
357fd895bf | ||
![]() |
d7ae583473 | ||
![]() |
f8f4718474 | ||
![]() |
304d2f0874 | ||
![]() |
bda31aea59 | ||
![]() |
dea669ac26 | ||
![]() |
40693471e9 | ||
![]() |
bee8aac0cc | ||
![]() |
2f2c49cc47 | ||
![]() |
b57a9f5c58 | ||
![]() |
6afe2dd720 | ||
![]() |
aef3ecc3f2 | ||
![]() |
01ce86ce18 | ||
![]() |
99362c78bf | ||
![]() |
c0bf9a9a76 | ||
![]() |
1833b3e89f | ||
![]() |
6b71f91775 | ||
![]() |
5275988f37 | ||
![]() |
93162beaa0 | ||
![]() |
61699c7477 | ||
![]() |
a1a2434d98 | ||
![]() |
fd56ddaa54 | ||
![]() |
c438b9f8a5 | ||
![]() |
91e83b3397 | ||
![]() |
50bfc8db09 | ||
![]() |
3d0a20452f | ||
![]() |
b60e6a7d85 | ||
![]() |
c5b8495477 | ||
![]() |
809632794f | ||
![]() |
62ecc1cbc1 | ||
![]() |
cd822156c9 | ||
![]() |
e85c5e3b36 | ||
![]() |
194d57c85f | ||
![]() |
02ab6d8f59 | ||
![]() |
a548426c39 | ||
![]() |
5f851c7880 | ||
![]() |
f91e264889 | ||
![]() |
514f8396b7 | ||
![]() |
d5eac963a9 | ||
![]() |
c39ff89fab | ||
![]() |
3f19f64efd | ||
![]() |
9135a1a1cc | ||
![]() |
39366bfbf1 | ||
![]() |
5eeec0a828 | ||
![]() |
4f28a0f84b | ||
![]() |
56f057143e | ||
![]() |
39568737dd | ||
![]() |
13e0d9b306 | ||
![]() |
d6b7159db1 | ||
![]() |
2575476086 | ||
![]() |
211d68f590 | ||
![]() |
3e51b888cb | ||
![]() |
ff8f41c1eb | ||
![]() |
c6fd61ddf3 | ||
![]() |
9ca4b823bb | ||
![]() |
f513737628 | ||
![]() |
90051972d4 | ||
![]() |
6fa37c618b | ||
![]() |
47db295088 | ||
![]() |
6405ca5cef | ||
![]() |
2ba74d9232 | ||
![]() |
ed09595a87 | ||
![]() |
7c35bcdead | ||
![]() |
189627c89e | ||
![]() |
07bf8ef3b1 | ||
![]() |
32d902d37f | ||
![]() |
3028a8ebc9 | ||
![]() |
5c503203dc | ||
![]() |
644f65b5e4 | ||
![]() |
f593199af7 | ||
![]() |
26a5f12a8e | ||
![]() |
f3257a6743 | ||
![]() |
9f35efc31d | ||
![]() |
98c6add637 | ||
![]() |
3904dcda39 | ||
![]() |
6376f7dd56 | ||
![]() |
ae0a78dfb1 | ||
![]() |
1ad021ffa9 | ||
![]() |
ee6ab8e82a | ||
![]() |
b56c985a63 | ||
![]() |
abbf6ae2c8 | ||
![]() |
403ec56ef8 | ||
![]() |
a6e67fa9c3 | ||
![]() |
b6a401d697 | ||
![]() |
23be1d18a4 | ||
![]() |
d721d459eb | ||
![]() |
e1dd0f0979 | ||
![]() |
866947c445 | ||
![]() |
8ff3bbc9a7 | ||
![]() |
498191261c | ||
![]() |
f9091ea951 | ||
![]() |
8b9d75d726 | ||
![]() |
7e4a4459b4 | ||
![]() |
70fd33e587 | ||
![]() |
a265c2a7ca | ||
![]() |
fee5766abf | ||
![]() |
03573e46f5 | ||
![]() |
64b4789bc1 | ||
![]() |
13e651c822 | ||
![]() |
7d3fd6f5b6 | ||
![]() |
0c91fd396e | ||
![]() |
1827e1e033 | ||
![]() |
771dce4c8c | ||
![]() |
ab2fe909f0 | ||
![]() |
6fb9c6ef05 | ||
![]() |
9b41950e4c | ||
![]() |
bc09b6280b | ||
![]() |
3e7a4f593a | ||
![]() |
679a88c095 | ||
![]() |
97d1d0d63b | ||
![]() |
ee30f78ab0 | ||
![]() |
566b689ca6 | ||
![]() |
c2edbb30b5 | ||
![]() |
a6f86329c5 | ||
![]() |
05c1df81ad | ||
![]() |
327badb3ca | ||
![]() |
335a7a76b1 | ||
![]() |
1566da1f8f | ||
![]() |
1250b45694 | ||
![]() |
48e8b956d2 | ||
![]() |
56e80f7c0c | ||
![]() |
bd6cb3e5be | ||
![]() |
f2db4b2d1b | ||
![]() |
4a823c57d5 | ||
![]() |
2f74f64d65 | ||
![]() |
d44677e46f | ||
![]() |
4afb6b3d9c | ||
![]() |
18d3e8905e | ||
![]() |
fd14a4ac80 | ||
![]() |
cc48a0ad86 | ||
![]() |
aad1a836db | ||
![]() |
3d3bcaba7a | ||
![]() |
201169c4d7 | ||
![]() |
1ff6c701e8 | ||
![]() |
3fb9f900f6 | ||
![]() |
9c4093dc7c | ||
![]() |
acfe4deacf | ||
![]() |
342f9f6096 | ||
![]() |
f5b4936197 | ||
![]() |
0c6c7d2725 | ||
![]() |
136772073d | ||
![]() |
f5846853b3 | ||
![]() |
6db011d807 | ||
![]() |
fe59352800 | ||
![]() |
dedf6cd92f | ||
![]() |
99fdbcd1fe | ||
![]() |
005358f4bf | ||
![]() |
ea1cd63929 | ||
![]() |
c8ac4c0278 | ||
![]() |
49a332724e | ||
![]() |
a21c417c0b | ||
![]() |
155f05cb63 | ||
![]() |
6d644922d5 | ||
![]() |
b0a125c113 | ||
![]() |
d17cb3fafc | ||
![]() |
814fa881d0 | ||
![]() |
fd451ae766 | ||
![]() |
089bb92f70 | ||
![]() |
5b6e1f699d | ||
![]() |
03c469ff3a | ||
![]() |
c122dc108a | ||
![]() |
22c682f393 | ||
![]() |
d7b6c54b08 | ||
![]() |
a715650af6 | ||
![]() |
a72da8af56 | ||
![]() |
9b3a50c124 | ||
![]() |
5d7b8cfd48 | ||
![]() |
6318c0889a | ||
![]() |
0aae28e836 | ||
![]() |
b298571395 | ||
![]() |
4f6a496c36 | ||
![]() |
a3a7af73bd | ||
![]() |
520fd6e063 | ||
![]() |
bcc3b4d3ac | ||
![]() |
5ff8d6be38 | ||
![]() |
0f5658e65e | ||
![]() |
b89eb4f1e8 | ||
![]() |
9546817ad2 | ||
![]() |
6a31ce3e2e | ||
![]() |
905095317c | ||
![]() |
d8ff0e1745 | ||
![]() |
f9ac614147 | ||
![]() |
581a18f83d | ||
![]() |
f2c76c0a3b | ||
![]() |
7121a96ebd | ||
![]() |
8986d8037a | ||
![]() |
d97631fb8c | ||
![]() |
be1eb14fd0 | ||
![]() |
92e353be40 | ||
![]() |
c595ba3dc0 | ||
![]() |
ccd002b97f | ||
![]() |
c7d067ea3c | ||
![]() |
adbf787bd4 | ||
![]() |
7ee5cbe473 | ||
![]() |
c9c1d10ee4 | ||
![]() |
b5452bce6b | ||
![]() |
6628b07bba | ||
![]() |
8f34afc630 | ||
![]() |
1ac69ae315 | ||
![]() |
8a331e7400 | ||
![]() |
ca42740e1b | ||
![]() |
1d5c0597d9 | ||
![]() |
7576e60797 | ||
![]() |
acc734d22d | ||
![]() |
8ab7d26cd1 | ||
![]() |
e86375f721 | ||
![]() |
fde44d78d7 | ||
![]() |
a2567277c6 | ||
![]() |
feda6f88ff | ||
![]() |
74e5f5310b | ||
![]() |
51fdf95295 | ||
![]() |
0009b1b7d1 | ||
![]() |
74e50e209a | ||
![]() |
1208294747 | ||
![]() |
3de182a4ed | ||
![]() |
90ff9ac7fb | ||
![]() |
6b92d011d2 | ||
![]() |
8b9a8997ac | ||
![]() |
9df86ecaa9 | ||
![]() |
cfeb2797e3 | ||
![]() |
06a50b189d | ||
![]() |
7c1a4e353d | ||
![]() |
2b611ae798 | ||
![]() |
fdd82f7ac4 | ||
![]() |
ea9d345548 | ||
![]() |
24372470f6 | ||
![]() |
51f1781df4 | ||
![]() |
972bef4f7d | ||
![]() |
a2da8eca68 | ||
![]() |
620d8caea9 | ||
![]() |
4ddaa9b57f | ||
![]() |
22bb77e7a0 | ||
![]() |
3582b2c114 | ||
![]() |
9db715ee17 | ||
![]() |
1aaa1ba975 | ||
![]() |
35bd5e8595 | ||
![]() |
86e607c403 | ||
![]() |
c7247fc462 | ||
![]() |
25add764e4 | ||
![]() |
7d8490bdcb | ||
![]() |
f8fe426f70 | ||
![]() |
9b61ae44ab | ||
![]() |
991e59d0fc | ||
![]() |
7f3cf9583c | ||
![]() |
ff7ac90785 | ||
![]() |
b01e381087 | ||
![]() |
5026f1b0a8 | ||
![]() |
9d09890457 | ||
![]() |
4483180569 | ||
![]() |
4465146615 | ||
![]() |
3b7750c24e | ||
![]() |
7962baa2da | ||
![]() |
6f37654303 | ||
![]() |
2781e31be0 | ||
![]() |
b39d86d020 | ||
![]() |
754a3db1b9 | ||
![]() |
bc1d79a07d | ||
![]() |
8242e91238 | ||
![]() |
ff24899a62 | ||
![]() |
c3250378ee | ||
![]() |
9d4515954b | ||
![]() |
46455737cc | ||
![]() |
09413f5fc7 | ||
![]() |
2e74b73ba0 | ||
![]() |
818983831a | ||
![]() |
c64e97f0d6 | ||
![]() |
9034449e96 | ||
![]() |
00b6842f35 | ||
![]() |
99e5af67b7 | ||
![]() |
e8f049a765 | ||
![]() |
53afd97346 | ||
![]() |
604792a4dd | ||
![]() |
9b83c683b5 | ||
![]() |
9d511ba165 | ||
![]() |
c519239d87 | ||
![]() |
9f39bee486 | ||
![]() |
fa409e63f8 | ||
![]() |
e048fa6c6a | ||
![]() |
940d583f9a | ||
![]() |
ed965041e6 | ||
![]() |
1a5e661e14 | ||
![]() |
0f7ed0e04d | ||
![]() |
d50603f722 | ||
![]() |
e3931e8892 | ||
![]() |
4efcfa8bf4 | ||
![]() |
9551d11f5c | ||
![]() |
b40ef09108 | ||
![]() |
95d9d4ee9a | ||
![]() |
75cf8bbb0a | ||
![]() |
c40f0c6919 | ||
![]() |
231df44c94 | ||
![]() |
807dc866e5 | ||
![]() |
3d5c25efe4 | ||
![]() |
5a7a80f932 | ||
![]() |
3f52f78af2 | ||
![]() |
a66b71fb9c | ||
![]() |
65eff6b6d9 | ||
![]() |
027ee83c6f | ||
![]() |
60af54a17e | ||
![]() |
19c335e39a | ||
![]() |
d32303de57 | ||
![]() |
de360a8585 | ||
![]() |
24c1596048 | ||
![]() |
9484a27801 | ||
![]() |
c8eb93db07 | ||
![]() |
3c8867bf72 | ||
![]() |
ef2f1d56d4 | ||
![]() |
046877599e | ||
![]() |
b322b750c2 | ||
![]() |
4a75d9d4f5 | ||
![]() |
68ea4d02cb | ||
![]() |
8b3ff6454f | ||
![]() |
de28643e20 | ||
![]() |
3030e00b21 | ||
![]() |
7ba3d86af4 | ||
![]() |
2f3b3ebf2b | ||
![]() |
136a0fbe28 | ||
![]() |
7905e12fc3 | ||
![]() |
07caaa6b48 | ||
![]() |
59a04f101d | ||
![]() |
4265c4126a | ||
![]() |
0e85beec30 | ||
![]() |
f1276beb97 | ||
![]() |
365ba9b5f4 | ||
![]() |
f997a69ebc | ||
![]() |
e2b6dd37b7 | ||
![]() |
bc9bddf918 | ||
![]() |
b41570b663 | ||
![]() |
4c6e01d38d | ||
![]() |
8ef1ef0ce5 | ||
![]() |
e4fc4ca410 | ||
![]() |
0b9bc56959 | ||
![]() |
e9beeb85fc | ||
![]() |
a71e82c315 | ||
![]() |
d176150248 | ||
![]() |
ab9fa9ec0c | ||
![]() |
3d57fbb1b8 | ||
![]() |
f0f9240fc8 | ||
![]() |
b1bfe39cd5 | ||
![]() |
c82b19687a | ||
![]() |
be755fe25e | ||
![]() |
8e2be5cfe2 | ||
![]() |
cf1154f2c5 | ||
![]() |
f7ce401564 | ||
![]() |
5dbaf02e33 | ||
![]() |
2ef537ee6c | ||
![]() |
74048ce53f | ||
![]() |
96ceb253e8 | ||
![]() |
aa9e8b067f | ||
![]() |
e5963f8a76 | ||
![]() |
e5b851b171 | ||
![]() |
282f06b65e | ||
![]() |
e8a439cad3 | ||
![]() |
cd025316db | ||
![]() |
6e55c8b7c7 | ||
![]() |
38c663ee95 | ||
![]() |
ca473d6c3b | ||
![]() |
36b74e1c6a | ||
![]() |
8e3e96fe65 | ||
![]() |
0f0f3894ff | ||
![]() |
a2182e8a8d | ||
![]() |
4042de460e | ||
![]() |
0dd34403a2 | ||
![]() |
edb9d3f719 | ||
![]() |
1f5db0aa24 | ||
![]() |
74b2408668 | ||
![]() |
f53612bef7 | ||
![]() |
4d991cda6d | ||
![]() |
6984575901 | ||
![]() |
75b44f6980 | ||
![]() |
3c46f60042 | ||
![]() |
7dbc323f76 | ||
![]() |
903b62ba7c | ||
![]() |
53da40fe5d | ||
![]() |
fdb7b4cc0f | ||
![]() |
d0f85f3d04 | ||
![]() |
49d863fa54 | ||
![]() |
6fe5e2b751 | ||
![]() |
59d45f445a | ||
![]() |
09d19da3bc | ||
![]() |
1036d0e4a0 | ||
![]() |
8ba4201237 | ||
![]() |
7c69118c2c | ||
![]() |
6238bb7f2d | ||
![]() |
42ea01dde9 | ||
![]() |
c0b8e802ec | ||
![]() |
43bd8a2d6c | ||
![]() |
e85efe2c64 | ||
![]() |
988d169687 | ||
![]() |
43e6f374fa | ||
![]() |
c91011e6ea | ||
![]() |
f45c0b2377 | ||
![]() |
9b8eeb6a73 | ||
![]() |
468fe8f266 | ||
![]() |
5ccdfbd40a | ||
![]() |
ad969dcca7 | ||
![]() |
08b31416f3 | ||
![]() |
c494b01435 | ||
![]() |
c0e69e91a1 | ||
![]() |
1c029981ae | ||
![]() |
f9e64bc874 | ||
![]() |
8d26a572dd | ||
![]() |
936371a7ec | ||
![]() |
628cc992e9 | ||
![]() |
636813c603 | ||
![]() |
2eb6cc8f55 | ||
![]() |
8ae9b5261e | ||
![]() |
d6d9e911fe | ||
![]() |
797abdea80 | ||
![]() |
7153982981 | ||
![]() |
fcfd428b54 | ||
![]() |
20e3dc5fa7 | ||
![]() |
32ca6ac1a7 | ||
![]() |
02ad7542b3 | ||
![]() |
57c042ef79 | ||
![]() |
614a18913e | ||
![]() |
bfb7392198 | ||
![]() |
7eca3f4bc5 | ||
![]() |
52a468d9fd | ||
![]() |
2cf86642a2 | ||
![]() |
e76164c7a9 | ||
![]() |
bf0d359116 | ||
![]() |
5f37bf3eef | ||
![]() |
d8453384ab | ||
![]() |
f4cdc31788 | ||
![]() |
20a063105c | ||
![]() |
f8f26f0a89 | ||
![]() |
522e9ebf39 | ||
![]() |
27a9ea78b8 | ||
![]() |
7fa019321c | ||
![]() |
f98f52a50e | ||
![]() |
bc71555a73 | ||
![]() |
d3d7489232 | ||
![]() |
94cd5d32e5 | ||
![]() |
db339eecd3 | ||
![]() |
99dd5d5f63 | ||
![]() |
53cd2cdbf3 | ||
![]() |
4bbea46b90 | ||
![]() |
38230c62e6 | ||
![]() |
932db9bbe4 | ||
![]() |
bfd3d18a10 | ||
![]() |
b584c586ec | ||
![]() |
11463dbba5 | ||
![]() |
8fdfba97d9 | ||
![]() |
158f9e37e5 | ||
![]() |
28fbf5a802 | ||
![]() |
905976a1b6 | ||
![]() |
5ad614743b | ||
![]() |
ebdb1ed322 | ||
![]() |
b1989ef02e | ||
![]() |
ded6025eee | ||
![]() |
d591df39cc | ||
![]() |
d44f08382a | ||
![]() |
5e926ec98e | ||
![]() |
a27422a0fb | ||
![]() |
2af08c0ba1 | ||
![]() |
b6c0c955c5 | ||
![]() |
32a0f96ecd | ||
![]() |
37ef2802de | ||
![]() |
9f762a0230 | ||
![]() |
f1c320dd22 | ||
![]() |
4c7500e248 | ||
![]() |
8487056edb | ||
![]() |
bf69d24566 | ||
![]() |
1fc2a917d4 | ||
![]() |
c0a96983a7 | ||
![]() |
a4132c9cb1 | ||
![]() |
5c5e4b914c | ||
![]() |
1db2327b3d | ||
![]() |
0a380dc0ab | ||
![]() |
29aa1b7b93 | ||
![]() |
5a08545e2d | ||
![]() |
65d68fb8ac | ||
![]() |
d093764731 | ||
![]() |
24982aee42 | ||
![]() |
ef4a4acbc0 | ||
![]() |
b503d59c49 | ||
![]() |
f451ce6c91 | ||
![]() |
4f729aa98f | ||
![]() |
bdb5e1597c | ||
![]() |
6a1c5d96ac | ||
![]() |
c1aff56cf2 | ||
![]() |
bfd57561b7 | ||
![]() |
4cd393e4d8 | ||
![]() |
ac230219ee | ||
![]() |
c96dce3d0b | ||
![]() |
9e13994c54 | ||
![]() |
b8d68e2589 | ||
![]() |
5f753d41d4 | ||
![]() |
d2d335314d | ||
![]() |
545f4964a1 | ||
![]() |
7e88c4431a | ||
![]() |
e1d3a31c6c | ||
![]() |
231195ba0a | ||
![]() |
6cb700b302 | ||
![]() |
504b147cee | ||
![]() |
d9b3078f7b | ||
![]() |
b35bff4164 | ||
![]() |
5b6d4f972e | ||
![]() |
eb97ea4f34 | ||
![]() |
a69531c4c9 | ||
![]() |
527c2b4f54 | ||
![]() |
467cd480cf | ||
![]() |
6610914fdb | ||
![]() |
fa96ab453f | ||
![]() |
5bdc3b4fb5 | ||
![]() |
eee073c103 | ||
![]() |
35efd6b107 | ||
![]() |
f45ecb6cf4 | ||
![]() |
db8fe8d890 | ||
![]() |
bd9f88416c | ||
![]() |
cbac57dc88 | ||
![]() |
504d7812e5 | ||
![]() |
b88ee9a87a | ||
![]() |
2aba9cc3c2 | ||
![]() |
961fcbf660 | ||
![]() |
56c9ac8e46 | ||
![]() |
1f7b25b60c | ||
![]() |
34ec05d4b4 | ||
![]() |
bf672bdec3 | ||
![]() |
bb27e3124f | ||
![]() |
551aa3ef67 | ||
![]() |
de550d7cae | ||
![]() |
f50ec0eeee | ||
![]() |
6e51b5ba32 | ||
![]() |
8281176873 | ||
![]() |
eaa344abca | ||
![]() |
1f22009360 | ||
![]() |
7d7272a266 | ||
![]() |
83e372ce47 | ||
![]() |
8db16a1421 | ||
![]() |
21d0b829d3 | ||
![]() |
08219a0285 | ||
![]() |
d426b6bb02 | ||
![]() |
688c240b35 | ||
![]() |
b82cf8d08d | ||
![]() |
c44cb11800 | ||
![]() |
b9e0275417 | ||
![]() |
dc82f7ab3e | ||
![]() |
a6ce2a0253 | ||
![]() |
a0d85d7d83 | ||
![]() |
6a4882d81d | ||
![]() |
ac1a03d17b | ||
![]() |
0e930efd95 | ||
![]() |
d281f112d9 | ||
![]() |
5a42cb92cd | ||
![]() |
3459d35ed2 | ||
![]() |
699a8be721 | ||
![]() |
1f4a5643db | ||
![]() |
492408def7 | ||
![]() |
74763465a8 | ||
![]() |
c069d4f67c | ||
![]() |
bf41c7b651 | ||
![]() |
3f5a5b4f9b | ||
![]() |
b60a1ec455 | ||
![]() |
13157eea1e | ||
![]() |
dd11f87673 | ||
![]() |
87910a236b | ||
![]() |
096675c87e | ||
![]() |
7d3beeb9e0 | ||
![]() |
96a097e33d | ||
![]() |
1bb3140f5b | ||
![]() |
6bf3d34c6c | ||
![]() |
3cd7e4e48e | ||
![]() |
2777c4c537 | ||
![]() |
e525ea2431 | ||
![]() |
c8f37674c6 | ||
![]() |
12ca870e38 | ||
![]() |
f630e6c25e | ||
![]() |
7c6bea6ddd | ||
![]() |
efbef25c76 | ||
![]() |
1384da4691 | ||
![]() |
26a250d1df | ||
![]() |
426fe561c9 | ||
![]() |
5950b04c40 | ||
![]() |
434cfce32a | ||
![]() |
febdea6c64 | ||
![]() |
6c0062dbc1 | ||
![]() |
49735b7e55 | ||
![]() |
1d6168f457 | ||
![]() |
2a161f4421 | ||
![]() |
f30b2b6fc2 | ||
![]() |
ebbe19131b | ||
![]() |
03cabcb07c | ||
![]() |
401d3ec1c9 | ||
![]() |
fd1f104f4e | ||
![]() |
66e2192e65 | ||
![]() |
0d6d19502f | ||
![]() |
a7b22e2055 | ||
![]() |
71fbcee76c | ||
![]() |
e157894694 | ||
![]() |
f08f354eb6 | ||
![]() |
23db93d20f | ||
![]() |
67212ee29e | ||
![]() |
15d8589069 | ||
![]() |
14cd9315c1 | ||
![]() |
f1786c2ee6 | ||
![]() |
7d9cb65ba1 | ||
![]() |
9f72fdeb41 | ||
![]() |
b7106995b7 | ||
![]() |
dca3c775d1 | ||
![]() |
916709a7e4 | ||
![]() |
066f54f521 | ||
![]() |
64199c7ded | ||
![]() |
79ca1069ec | ||
![]() |
43c8dacd70 | ||
![]() |
f773bf3336 | ||
![]() |
9a9bd71634 | ||
![]() |
25657ebdca | ||
![]() |
c7146613a1 | ||
![]() |
c58d28861d | ||
![]() |
ff69b04216 | ||
![]() |
7653cba247 | ||
![]() |
b0ee0dae93 | ||
![]() |
0224863b1f | ||
![]() |
94c7ce9f42 | ||
![]() |
2a3b8a7692 | ||
![]() |
fec3ff7d8e | ||
![]() |
4b346243eb | ||
![]() |
c2b177434d | ||
![]() |
c5d90ddd19 | ||
![]() |
d607696a75 | ||
![]() |
4088c88a56 | ||
![]() |
3489d5de09 | ||
![]() |
db8c85d249 | ||
![]() |
f4a040064c | ||
![]() |
a89f66e8b5 | ||
![]() |
e9ce137e14 | ||
![]() |
2f7bfa95b2 | ||
![]() |
c84fcf2034 | ||
![]() |
0d3fa43f00 | ||
![]() |
2e04ba1880 | ||
![]() |
85b2a81568 | ||
![]() |
0725464388 | ||
![]() |
f506ac9701 | ||
![]() |
97b3563e6b | ||
![]() |
9ac5746e3c | ||
![]() |
1f83b6691b | ||
![]() |
1f1cbf01d7 | ||
![]() |
eefe7e1b5a | ||
![]() |
bc02de4b18 | ||
![]() |
42874038e2 | ||
![]() |
d55fbd1728 | ||
![]() |
aa402ebaf3 | ||
![]() |
ae21f056d5 | ||
![]() |
a4a23f3ea0 | ||
![]() |
7a996b3c8d | ||
![]() |
2db48b8c22 | ||
![]() |
90e7a69926 | ||
![]() |
3c7de29ca7 | ||
![]() |
9dbd6d881f | ||
![]() |
67395b5e47 | ||
![]() |
47d585768b | ||
![]() |
0f65783266 | ||
![]() |
fa3a981c19 | ||
![]() |
868e956c3f | ||
![]() |
742c2fe684 | ||
![]() |
1d06d30812 | ||
![]() |
5801863b76 | ||
![]() |
7fc2681252 | ||
![]() |
eae2d59da7 | ||
![]() |
4c11c96164 | ||
![]() |
dd5fba69d6 | ||
![]() |
653219d1de | ||
![]() |
6167dda01d | ||
![]() |
0b7d1ad90d | ||
![]() |
5e6f6c5b85 | ||
![]() |
f0f32cf25c | ||
![]() |
daefbada16 | ||
![]() |
d6721f0656 | ||
![]() |
de363b57ce | ||
![]() |
e291555e60 | ||
![]() |
0c0ec30560 | ||
![]() |
5d6169c232 | ||
![]() |
830f98573e | ||
![]() |
b0d574dfb0 | ||
![]() |
b73742839b | ||
![]() |
89bb12de71 | ||
![]() |
ed11ae283f | ||
![]() |
fb43dfb4f2 | ||
![]() |
6dc084c3ab | ||
![]() |
ba9cd9b0ca | ||
![]() |
aacde33614 | ||
![]() |
3f59727ab6 | ||
![]() |
b028eee90e | ||
![]() |
a871b5dbe7 | ||
![]() |
7900578077 | ||
![]() |
35a18bd0b2 | ||
![]() |
e6680b4f60 | ||
![]() |
077b3df3cd | ||
![]() |
edd726ac9f | ||
![]() |
cbf184846a | ||
![]() |
2677ad92d3 | ||
![]() |
f9c0c21714 | ||
![]() |
442c01eabf | ||
![]() |
d467aabd4c | ||
![]() |
f4f4aa0e50 | ||
![]() |
ffb67ab23e | ||
![]() |
1a3b48a60d | ||
![]() |
84b535767b | ||
![]() |
5908d2ca53 | ||
![]() |
27a582634f | ||
![]() |
47245f485a | ||
![]() |
545459be88 | ||
![]() |
b5d3995874 | ||
![]() |
487f5cb8c2 | ||
![]() |
cc31081f1c | ||
![]() |
a9b9902e60 | ||
![]() |
f183e756f3 | ||
![]() |
9ec050341b | ||
![]() |
bf042ce7a3 | ||
![]() |
d2cf613872 | ||
![]() |
f67cb7b145 | ||
![]() |
9f5ea86ebd | ||
![]() |
b8fb53ba20 | ||
![]() |
db744f576e | ||
![]() |
3f772d1e1e | ||
![]() |
438bdbc141 | ||
![]() |
728e497057 | ||
![]() |
d346a532ff | ||
![]() |
34a6337c01 | ||
![]() |
e42631932b | ||
![]() |
e9b2c76a88 | ||
![]() |
270eb677c7 | ||
![]() |
9a772eaecc | ||
![]() |
fae5d22e86 | ||
![]() |
c5629adf23 | ||
![]() |
9b480b41e1 | ||
![]() |
fbed15318b | ||
![]() |
14c906561d | ||
![]() |
e125895615 | ||
![]() |
6bef5ca7f4 | ||
![]() |
f33f1faeea | ||
![]() |
b261dc5dfd | ||
![]() |
164a9b947f | ||
![]() |
46440565dd | ||
![]() |
49adea1654 | ||
![]() |
169cf2e6ae | ||
![]() |
dafe9981a2 | ||
![]() |
c78af9773c | ||
![]() |
1a7a22eb91 | ||
![]() |
4c7ff9bb53 | ||
![]() |
1ee592f5b7 | ||
![]() |
10632c15ef | ||
![]() |
25bb14037c | ||
![]() |
d7db7a8060 | ||
![]() |
fe2344ea12 | ||
![]() |
7898b237fc | ||
![]() |
f3edd8013d | ||
![]() |
279e2ae865 | ||
![]() |
c056384bd5 | ||
![]() |
a06ce64bbf | ||
![]() |
7e314287d6 | ||
![]() |
7bda72027a | ||
![]() |
c5c0df838f | ||
![]() |
75d94aaf06 | ||
![]() |
16bfb35109 | ||
![]() |
6c8a7e3cfa | ||
![]() |
409841a3cf | ||
![]() |
ffa3055a33 | ||
![]() |
41aa15ec26 | ||
![]() |
880d536837 | ||
![]() |
1144f28982 | ||
![]() |
3384bd1e58 | ||
![]() |
0cd795b157 | ||
![]() |
03f3e9e4fd | ||
![]() |
e1f7752aff | ||
![]() |
52afc9d0b2 | ||
![]() |
8ea8e8c28a | ||
![]() |
c6792b7674 | ||
![]() |
de14c55311 | ||
![]() |
2cb8cc9107 | ||
![]() |
309308dac0 | ||
![]() |
8650651567 | ||
![]() |
4bb7e04f2a | ||
![]() |
7e53bed95f | ||
![]() |
e527dbdf88 | ||
![]() |
8295f9cd6b | ||
![]() |
2f38f8d9f3 | ||
![]() |
44afaf7a4b | ||
![]() |
ccfcdbc6ca | ||
![]() |
929432d469 | ||
![]() |
e15cb8a43d | ||
![]() |
fbe96a0a6d | ||
![]() |
fa65a6dc05 | ||
![]() |
9921fc07dd | ||
![]() |
12a0c955bc | ||
![]() |
2be3ff02bd | ||
![]() |
c6c6af1268 | ||
![]() |
e3baf8944e | ||
![]() |
233d5f31f2 | ||
![]() |
cfb2276ecd | ||
![]() |
16f7a6d3cb | ||
![]() |
b8bb8cab25 | ||
![]() |
f3c4e1dc19 | ||
![]() |
5cbcb7e878 | ||
![]() |
fed61d7109 | ||
![]() |
d7ba9f6924 | ||
![]() |
cbf175ac96 | ||
![]() |
afcd6dfa14 | ||
![]() |
f55c2659cd | ||
![]() |
7705cb1158 | ||
![]() |
9755df6fa6 | ||
![]() |
e564d730ce | ||
![]() |
e37938abe0 | ||
![]() |
9b5f7928bf | ||
![]() |
d7496d6fd5 | ||
![]() |
2a131f7978 | ||
![]() |
f46f9fa6ea | ||
![]() |
2473a09f7b | ||
![]() |
ec513f0b60 | ||
![]() |
03bec6a36c | ||
![]() |
ad826dd846 | ||
![]() |
fd8742ae3a | ||
![]() |
96e45bb517 | ||
![]() |
909b9da9da | ||
![]() |
50937990e5 | ||
![]() |
4fbfb5838a | ||
![]() |
e2a7d86446 | ||
![]() |
ef6c9325e0 | ||
![]() |
0e2f742b7e | ||
![]() |
7d85a10e82 | ||
![]() |
dff261eb18 | ||
![]() |
349ee5858f | ||
![]() |
461f0a274a | ||
![]() |
a2eef70030 | ||
![]() |
2efb6cf4f1 | ||
![]() |
36db8a4dc9 | ||
![]() |
52ed170292 | ||
![]() |
871588aa9f | ||
![]() |
a72c4eafff | ||
![]() |
97cec2ba46 | ||
![]() |
4e846774b4 | ||
![]() |
a6be8ddc2d | ||
![]() |
b65670b9e4 | ||
![]() |
ea635d4630 | ||
![]() |
2e9737c149 | ||
![]() |
0f8de2b7c1 | ||
![]() |
2188c5d9fa | ||
![]() |
b5d879c1cd | ||
![]() |
fd365a19f7 | ||
![]() |
adbaadae81 | ||
![]() |
09ad4133fb | ||
![]() |
da49981ecd | ||
![]() |
ebd00bfb9f | ||
![]() |
353ddd3105 | ||
![]() |
e80887d779 | ||
![]() |
06ddf26d09 | ||
![]() |
c5c41f2d3e | ||
![]() |
c5b47fe4a0 | ||
![]() |
fe7a53fb8a | ||
![]() |
bad2a79f05 | ||
![]() |
75100c335f | ||
![]() |
b31c928898 | ||
![]() |
656c818211 | ||
![]() |
c95637eee1 | ||
![]() |
819f765bce | ||
![]() |
f42ad03f91 | ||
![]() |
26bad8d05b | ||
![]() |
117d577e82 | ||
![]() |
0aa6358938 | ||
![]() |
048207f70b | ||
![]() |
a724dd472f | ||
![]() |
ad900479f6 | ||
![]() |
2e4767926c | ||
![]() |
4d3603136f | ||
![]() |
8a15bf059d | ||
![]() |
b9a5c94923 | ||
![]() |
c08b83c299 | ||
![]() |
4aff146812 | ||
![]() |
76bebb1eed | ||
![]() |
71ac8ff996 | ||
![]() |
92b9397ee2 | ||
![]() |
a59cf9b9aa | ||
![]() |
221aa16433 | ||
![]() |
44507d8556 | ||
![]() |
1897bce814 | ||
![]() |
04492a3262 | ||
![]() |
f857330f47 | ||
![]() |
7fb8cd06bc | ||
![]() |
2854e202e1 | ||
![]() |
63b609f725 | ||
![]() |
2eef0bded1 | ||
![]() |
45e2b350a9 | ||
![]() |
ccf2897a65 | ||
![]() |
22059e71a2 | ||
![]() |
8ba684e8c3 | ||
![]() |
ce4a91b679 | ||
![]() |
ddc3f72afb | ||
![]() |
d57d82f439 | ||
![]() |
39734dfadb | ||
![]() |
23dc8e063c | ||
![]() |
f5fb6b83ff | ||
![]() |
3d55af2111 | ||
![]() |
179ee2d2a4 | ||
![]() |
94485ed78c | ||
![]() |
9381015d80 | ||
![]() |
3ff2ec9104 | ||
![]() |
6a71bb256e | ||
![]() |
cfad85b30e | ||
![]() |
276b65f0b9 | ||
![]() |
a15d82a7ef | ||
![]() |
ccd2a9f171 | ||
![]() |
9e550849f6 | ||
![]() |
6149f5c6ee | ||
![]() |
a390c19a20 | ||
![]() |
bb57120872 | ||
![]() |
edfe760227 | ||
![]() |
27d31b3a1e | ||
![]() |
0106a193e5 | ||
![]() |
f523925817 | ||
![]() |
893e37cd74 | ||
![]() |
0490fe9f18 | ||
![]() |
0d1c81353c | ||
![]() |
90c90b3cc6 | ||
![]() |
768ae23b09 | ||
![]() |
95229852fa | ||
![]() |
2421e387b9 | ||
![]() |
6b900ab0b1 | ||
![]() |
91cba2514a | ||
![]() |
db7cd746b1 | ||
![]() |
fda4aaf10d | ||
![]() |
381c4e4867 | ||
![]() |
0f0eed4317 | ||
![]() |
7f06abc359 | ||
![]() |
fc3f102528 | ||
![]() |
becb71ad28 | ||
![]() |
2cea157ecc | ||
![]() |
4cb14243be | ||
![]() |
62c31e1fc3 | ||
![]() |
56efa02820 | ||
![]() |
f3c71f3e6c | ||
![]() |
4ec0209dd7 | ||
![]() |
277d16eabb | ||
![]() |
38d330da1a | ||
![]() |
90c9adc8dc | ||
![]() |
eb30eb90d6 | ||
![]() |
6ec1776f57 | ||
![]() |
4f1fc06984 | ||
![]() |
d411d4d398 | ||
![]() |
c9fa29c562 | ||
![]() |
f74e9397db | ||
![]() |
4988d31aff | ||
![]() |
22eff13fe3 | ||
![]() |
f4bb6d24e5 | ||
![]() |
76013aa5fd | ||
![]() |
fcd0ec5996 | ||
![]() |
a08114b1b0 | ||
![]() |
57a0d4f8a6 | ||
![]() |
aab6255783 | ||
![]() |
66e2bfd76b | ||
![]() |
db7dad6731 | ||
![]() |
b03f287d86 | ||
![]() |
39e3f22d0b | ||
![]() |
2b389e4e85 | ||
![]() |
f2700332ae | ||
![]() |
4559ee0d7f | ||
![]() |
a7f21f0680 | ||
![]() |
1668659ff6 | ||
![]() |
13f0ccf8c6 | ||
![]() |
8a75f200ca | ||
![]() |
8b2920c296 | ||
![]() |
b29634e906 | ||
![]() |
32c63fb140 | ||
![]() |
d65c7d05a4 | ||
![]() |
72f91a2816 | ||
![]() |
c57d0283b3 | ||
![]() |
8fcca9cb9c | ||
![]() |
c9ba59e895 | ||
![]() |
697e177e13 | ||
![]() |
0f0cf18f92 | ||
![]() |
6271fb9218 | ||
![]() |
fd71a36da8 | ||
![]() |
6c9f709290 | ||
![]() |
802b6775d7 | ||
![]() |
2113b03a42 | ||
![]() |
a0c3d7f8a6 | ||
![]() |
6a46181e8e | ||
![]() |
54c553d13b | ||
![]() |
bf34d95bc1 | ||
![]() |
b6644887bd | ||
![]() |
50acc6b55f | ||
![]() |
8f3c596d46 | ||
![]() |
e7d8f6b807 | ||
![]() |
7938689f95 | ||
![]() |
8258da2e46 | ||
![]() |
5c1ce5302a | ||
![]() |
e92c861e1c | ||
![]() |
778e010bb3 | ||
![]() |
f529c7f2a2 | ||
![]() |
6679baadbb | ||
![]() |
cdf3b3f011 | ||
![]() |
2dc981496b | ||
![]() |
b68424f008 | ||
![]() |
60d58a2d45 | ||
![]() |
391b6e972f | ||
![]() |
8c9457d636 | ||
![]() |
0a3a71f634 | ||
![]() |
d3fc1cf897 | ||
![]() |
d6ff1dfd8d | ||
![]() |
6b0ef500ed | ||
![]() |
c20efea1e9 | ||
![]() |
77e07dd5b5 | ||
![]() |
1d8fe61920 | ||
![]() |
2425f9f03e | ||
![]() |
60fc5fb4d3 | ||
![]() |
4e96ddfb9e | ||
![]() |
9fd30b8853 | ||
![]() |
91c8526c88 | ||
![]() |
4a6dc7971e | ||
![]() |
901478475f | ||
![]() |
1a5bfd973e | ||
![]() |
6cb3bd9860 | ||
![]() |
6c68f8dd6c | ||
![]() |
2447279191 | ||
![]() |
612d885ad2 | ||
![]() |
60e9bfbf19 | ||
![]() |
5f567c357b | ||
![]() |
0a1eecee20 | ||
![]() |
86bba4f0e4 | ||
![]() |
bea212a8d1 | ||
![]() |
41ac8dc781 | ||
![]() |
3afca90c0e | ||
![]() |
8ef7e0a772 | ||
![]() |
6f2445c417 | ||
![]() |
da7821a3a9 | ||
![]() |
431415c052 | ||
![]() |
5c2b90c20f | ||
![]() |
9d5bfa4be8 | ||
![]() |
eee65da257 | ||
![]() |
d9f8e83429 | ||
![]() |
c94e73a029 | ||
![]() |
4514154364 | ||
![]() |
889c88c492 | ||
![]() |
ee0176e730 | ||
![]() |
5103879471 | ||
![]() |
74a4a4b9da | ||
![]() |
819f9fad2c | ||
![]() |
fdb12e8eaf | ||
![]() |
61ca9d6374 | ||
![]() |
dfb5a00c77 | ||
![]() |
bc7042bee1 | ||
![]() |
a3f62f56f6 | ||
![]() |
9a1735f37d | ||
![]() |
86b0046f28 | ||
![]() |
9565335394 | ||
![]() |
f00b5365f9 | ||
![]() |
690eff0e80 | ||
![]() |
e37daabb53 | ||
![]() |
5cf96dbada | ||
![]() |
987b7ffd0c | ||
![]() |
bb99e5c4ac | ||
![]() |
9e97f0d318 | ||
![]() |
f89f351b3c | ||
![]() |
34ab2bad49 | ||
![]() |
62501d00b1 | ||
![]() |
c0d2e5efa6 | ||
![]() |
afffcd18dd | ||
![]() |
34e52153a2 | ||
![]() |
6198137283 | ||
![]() |
d6e3db4ff7 | ||
![]() |
3cd04d03c8 | ||
![]() |
a17ab1ce25 | ||
![]() |
c4ae8ef711 | ||
![]() |
c2de26f557 | ||
![]() |
10772a704e | ||
![]() |
64321043b7 | ||
![]() |
61654edbf6 | ||
![]() |
83d8a44db7 | ||
![]() |
d62c8a0f16 | ||
![]() |
a875ddcf54 | ||
![]() |
61fee707ec | ||
![]() |
350b417ccb | ||
![]() |
383d8a5f33 | ||
![]() |
f1f12ee8bd | ||
![]() |
b66317fa7d | ||
![]() |
c69f70bdb0 | ||
![]() |
58848068e6 | ||
![]() |
20ed04af84 | ||
![]() |
5e6b41945b | ||
![]() |
173a7fc409 | ||
![]() |
2a443a29c2 | ||
![]() |
d3f44e35ce | ||
![]() |
a41078e2d3 | ||
![]() |
83f379a4d1 | ||
![]() |
4e0fea1d8b | ||
![]() |
e5b5f5ba62 | ||
![]() |
62ae824b36 | ||
![]() |
8b7bd2d572 | ||
![]() |
47744c1a0c | ||
![]() |
c1fd2be8cf | ||
![]() |
e919a9bd43 | ||
![]() |
e5ebf93bcb | ||
![]() |
a21ac77ae3 | ||
![]() |
7de2c1cb81 | ||
![]() |
3b74733b3b | ||
![]() |
cf52b505b2 | ||
![]() |
ae9d96f494 | ||
![]() |
c6aba41ed1 | ||
![]() |
4f97444bd5 | ||
![]() |
e2d73637f6 | ||
![]() |
e5d1d0a402 | ||
![]() |
9b269dc594 | ||
![]() |
5f6bdf7302 | ||
![]() |
df34abfcd9 | ||
![]() |
f083e26cf0 | ||
![]() |
befaa0ff21 | ||
![]() |
76686b40a8 | ||
![]() |
3e06f21432 | ||
![]() |
53e7d502c5 | ||
![]() |
77208475b0 | ||
![]() |
a86cbb934b | ||
![]() |
b9f5dda22b | ||
![]() |
45028fa2a3 | ||
![]() |
53d032b29d | ||
![]() |
be2e8ce34a | ||
![]() |
5adf4c31cb | ||
![]() |
052b795fa5 | ||
![]() |
610938b120 | ||
![]() |
1ed773bc02 | ||
![]() |
13a17c1a16 | ||
![]() |
1ac1ace66c | ||
![]() |
8102b0608a | ||
![]() |
1c7f2fd4b0 | ||
![]() |
033b16928c | ||
![]() |
3037d1bd4f | ||
![]() |
027388ce5a | ||
![]() |
9e9885803f | ||
![]() |
0fa0f4c0d9 | ||
![]() |
4451e20573 | ||
![]() |
6f982c8bf9 | ||
![]() |
d6c70a4259 | ||
![]() |
33a88c201e | ||
![]() |
1b53cbdc8f | ||
![]() |
bbbfffd1cb | ||
![]() |
a5290288c7 | ||
![]() |
f0e16fc5ae | ||
![]() |
6b13d00635 | ||
![]() |
cd669f0a14 | ||
![]() |
a601d5e41c | ||
![]() |
15061bafae | ||
![]() |
23a7c3e49c | ||
![]() |
167a9bfd1a | ||
![]() |
5dfcebac16 | ||
![]() |
d65d5eb45c | ||
![]() |
00e8d7af3b | ||
![]() |
782e5194ad | ||
![]() |
ab1a7d7a17 | ||
![]() |
4707adb2f6 | ||
![]() |
7839cf4f90 | ||
![]() |
84fb82a574 | ||
![]() |
1ef3338f9e | ||
![]() |
c8acb74f0c | ||
![]() |
4c8794a4d1 | ||
![]() |
430805adcc | ||
![]() |
38ff33512f | ||
![]() |
8ad6cd2e81 | ||
![]() |
0f31f1d34a | ||
![]() |
b25bf97cad | ||
![]() |
9ff2bfc4bb | ||
![]() |
b276f01f2e | ||
![]() |
e4cfec3ba0 | ||
![]() |
da2e47cd5f | ||
![]() |
d7ff49c4c7 | ||
![]() |
4413f77ba9 | ||
![]() |
bd49ecd585 | ||
![]() |
0e83eba6df | ||
![]() |
b220f5de5c | ||
![]() |
6c74f6c527 | ||
![]() |
2e08d68d87 | ||
![]() |
d5210d27e2 | ||
![]() |
c38611dc15 | ||
![]() |
eacd75108f | ||
![]() |
16ccac11ae | ||
![]() |
c82c61ef3d | ||
![]() |
43e9044ef7 | ||
![]() |
4f3282099f | ||
![]() |
c076f605aa | ||
![]() |
70e78056e9 | ||
![]() |
93866f9abf | ||
![]() |
c2984ecd47 | ||
![]() |
57df94afee | ||
![]() |
d21d0d5b9a | ||
![]() |
14f2470477 | ||
![]() |
84c3957bf4 | ||
![]() |
de5eb43da1 | ||
![]() |
110885401c | ||
![]() |
2b7e898781 | ||
![]() |
2cdde7dd46 | ||
![]() |
9765afc286 | ||
![]() |
cb5b3884cd | ||
![]() |
9178a3ad6a | ||
![]() |
cc504e93f6 | ||
![]() |
4890c4cb2f | ||
![]() |
73b3c8db55 | ||
![]() |
713a2f88a9 | ||
![]() |
30a2116abf | ||
![]() |
6acc81cb50 | ||
![]() |
af571deb16 | ||
![]() |
e6a82d4c64 | ||
![]() |
83b3dc8fbe | ||
![]() |
d1ad7e0850 | ||
![]() |
2048648b39 | ||
![]() |
a3d21f5377 | ||
![]() |
62d092e5ac | ||
![]() |
d60785ee33 | ||
![]() |
fe19bd2fce | ||
![]() |
7951763612 | ||
![]() |
5fafe7cf29 | ||
![]() |
bd51c61eb6 | ||
![]() |
9fcd456205 | ||
![]() |
c727443ea3 | ||
![]() |
7ffd897653 | ||
![]() |
e7c9c2d192 | ||
![]() |
0b599a95e8 | ||
![]() |
5893bbc199 | ||
![]() |
7baeb8e10f | ||
![]() |
14f10d7d10 | ||
![]() |
bdc8c7e5cc | ||
![]() |
f55a1f1bf3 | ||
![]() |
b7ede15b06 | ||
![]() |
d5d3f32bf8 | ||
![]() |
0d9e3baebc | ||
![]() |
8955998d25 | ||
![]() |
d595373f9f | ||
![]() |
12bf5bb152 | ||
![]() |
c35ffccf6c | ||
![]() |
0706748d10 | ||
![]() |
51c3ae15e8 | ||
![]() |
55417a1103 | ||
![]() |
f84c394e09 | ||
![]() |
368efddb0d | ||
![]() |
7d0852f7f3 | ||
![]() |
0a2617cd21 | ||
![]() |
790dc4e3d4 | ||
![]() |
b3ba79a0fd | ||
![]() |
81744c584e | ||
![]() |
0774662689 | ||
![]() |
6201612169 | ||
![]() |
fc239cfd0d | ||
![]() |
7677b6859e | ||
![]() |
76af3a2e78 | ||
![]() |
38838b94a2 | ||
![]() |
5ff31e7cb4 | ||
![]() |
ccb35f1353 | ||
![]() |
a4a52e7ed4 | ||
![]() |
0214741345 | ||
![]() |
d5b8722bb0 | ||
![]() |
c5851cd166 | ||
![]() |
0418a3e6de | ||
![]() |
8f3dfd7435 | ||
![]() |
efbd485f5f | ||
![]() |
3ba2ecedce | ||
![]() |
cb214476e4 | ||
![]() |
91513f15ae | ||
![]() |
d39d6a2bd4 | ||
![]() |
65d2b0d996 | ||
![]() |
484659fc3f | ||
![]() |
4ac933fa29 | ||
![]() |
660f3e9149 | ||
![]() |
c519cc21c6 | ||
![]() |
68c3d26aa1 | ||
![]() |
286dd05e1e | ||
![]() |
5032854aca | ||
![]() |
7055bcd9ed | ||
![]() |
811bd16768 | ||
![]() |
64cc162472 | ||
![]() |
5f3b01476a | ||
![]() |
4cfaff8ebd | ||
![]() |
ae2e15a6fa | ||
![]() |
64179e7dc1 | ||
![]() |
b003c8df01 | ||
![]() |
83158de1ce | ||
![]() |
c9efcf7389 | ||
![]() |
3774630476 | ||
![]() |
1849547133 | ||
![]() |
8f6f6abb0e | ||
![]() |
fbdc9e2b40 | ||
![]() |
bca08970ba | ||
![]() |
d6da0c421c | ||
![]() |
bb23bc7b3c | ||
![]() |
31077eb346 | ||
![]() |
2ab04deded | ||
![]() |
906f6c65d9 | ||
![]() |
ea47c47b5e | ||
![]() |
d760a71b76 | ||
![]() |
6a58083431 | ||
![]() |
a5a56355dc | ||
![]() |
3331e2305b | ||
![]() |
117a82c059 | ||
![]() |
13837971a7 | ||
![]() |
39dc00ad78 | ||
![]() |
9e68a6f7e7 | ||
![]() |
d608a0d847 | ||
![]() |
67c9921ace | ||
![]() |
4690a111bf | ||
![]() |
65e5d05a9e | ||
![]() |
fe59618eaf | ||
![]() |
60dd797d1a | ||
![]() |
e7762e03b4 | ||
![]() |
a7de4aca91 | ||
![]() |
1c86a6c58a | ||
![]() |
ca2a30f7ae | ||
![]() |
9fd19bf382 | ||
![]() |
14544922bf | ||
![]() |
bd5ebdb2de | ||
![]() |
931426e4fc | ||
![]() |
8241651a28 | ||
![]() |
e3de6da87e | ||
![]() |
7d78623c92 | ||
![]() |
c8325589cc | ||
![]() |
7ead5c701a | ||
![]() |
a7aaa73b84 | ||
![]() |
212eabbc20 | ||
![]() |
b2b91c9aff | ||
![]() |
d5651f1df3 | ||
![]() |
7572f8c98b | ||
![]() |
75e74f1ff7 | ||
![]() |
a6c4a77e13 | ||
![]() |
d9bbb20743 | ||
![]() |
ac530b53f6 | ||
![]() |
b8bf773c40 | ||
![]() |
dae5cd3969 | ||
![]() |
59205088d2 | ||
![]() |
cb20280530 | ||
![]() |
6806eed2ae | ||
![]() |
b7dfeb139b | ||
![]() |
25cb7d9ffa | ||
![]() |
bc875b396b | ||
![]() |
6ac4c8c9cb | ||
![]() |
4dbaab7dd4 | ||
![]() |
7eab490089 | ||
![]() |
49efcd0c2d | ||
![]() |
69f0e972d3 | ||
![]() |
46a2ea9421 | ||
![]() |
96043ba279 | ||
![]() |
d778f2a989 | ||
![]() |
5a36a002bf | ||
![]() |
37a3b4678d | ||
![]() |
365f0cd646 | ||
![]() |
2a7f03af8a | ||
![]() |
f698ec2502 | ||
![]() |
a1f0c20afc | ||
![]() |
b0a85b0669 | ||
![]() |
9528d333bd | ||
![]() |
4437a99330 | ||
![]() |
8f158cbc8f | ||
![]() |
135a9b8f5c | ||
![]() |
1dd488ef89 | ||
![]() |
a614545467 | ||
![]() |
3724e0f376 | ||
![]() |
a12c639bed | ||
![]() |
d6cc40cf3b | ||
![]() |
f4de16fde6 | ||
![]() |
0ab2630519 | ||
![]() |
53c087f1ea | ||
![]() |
682fa80a8a | ||
![]() |
1801df32a6 | ||
![]() |
23e06a09cb | ||
![]() |
1bd6e2f493 | ||
![]() |
ecb5885dba | ||
![]() |
227751e455 | ||
![]() |
46a84a42dc | ||
![]() |
2fcfa21761 | ||
![]() |
4efaaaabc0 | ||
![]() |
709dc854e7 | ||
![]() |
b52e8f74e2 | ||
![]() |
887a07c3fb | ||
![]() |
3ce11a0dab | ||
![]() |
6c6ce54a20 | ||
![]() |
d76c1ca98e | ||
![]() |
1557d3c390 | ||
![]() |
4d4219409a | ||
![]() |
7edb784305 | ||
![]() |
f44de281a8 | ||
![]() |
5061e0051a | ||
![]() |
2c4f566687 | ||
![]() |
21812ec064 | ||
![]() |
173355bc02 | ||
![]() |
0f51826d9d | ||
![]() |
3c7a9f0f82 | ||
![]() |
3049e489fb | ||
![]() |
67da76427b | ||
![]() |
d39b8570cc | ||
![]() |
3524198599 | ||
![]() |
274414a54b | ||
![]() |
b40b3e2d85 | ||
![]() |
c9660ed20e | ||
![]() |
8cd7a63490 | ||
![]() |
e9009c5c75 | ||
![]() |
686a2de570 | ||
![]() |
cb4cf564be | ||
![]() |
0b1ef00077 | ||
![]() |
92e4dd64d1 | ||
![]() |
3c5e4e64ca | ||
![]() |
16ed55a745 | ||
![]() |
64febc7f61 | ||
![]() |
4975ef82d7 | ||
![]() |
954c6bcbc3 | ||
![]() |
af4447e666 | ||
![]() |
e6456f491a | ||
![]() |
b7b238b890 | ||
![]() |
96f368c7e2 | ||
![]() |
48d59e355c | ||
![]() |
7bd76270ba | ||
![]() |
bed66cdcd2 | ||
![]() |
f719b89a7a | ||
![]() |
7291ac8c6b | ||
![]() |
ec759442ed | ||
![]() |
e4b718fb10 | ||
![]() |
b33499b7b5 | ||
![]() |
131a51fbdc | ||
![]() |
9a1e1e56b3 | ||
![]() |
538af99e8c | ||
![]() |
238cb25bb4 | ||
![]() |
f21fdb23b6 | ||
![]() |
0ddc1e47ba | ||
![]() |
55cc496c2e | ||
![]() |
721bed76a9 | ||
![]() |
a38b6c9fb2 | ||
![]() |
1f4609d1a3 | ||
![]() |
07a6bd523c | ||
![]() |
122ef02605 | ||
![]() |
67f75c4bef | ||
![]() |
0275954fb7 | ||
![]() |
44dc4c41c9 | ||
![]() |
e5e76a4ef5 | ||
![]() |
021a084796 | ||
![]() |
1ec00947ad | ||
![]() |
f38f4c3758 | ||
![]() |
2852bbc657 | ||
![]() |
0d565dc64d | ||
![]() |
1f3a4f0e2f | ||
![]() |
53249f4f07 | ||
![]() |
0569697d98 | ||
![]() |
acf24aab6f | ||
![]() |
3ff75c6602 | ||
![]() |
0be6cb6602 | ||
![]() |
9def3b58eb | ||
![]() |
447a37f228 | ||
![]() |
70cf67498b | ||
![]() |
10b46db524 | ||
![]() |
687208a785 | ||
![]() |
2915eb7a83 | ||
![]() |
f82055fef7 | ||
![]() |
835381fbb1 | ||
![]() |
04bed98a97 | ||
![]() |
d542cc5b32 | ||
![]() |
c486e9fa1a | ||
![]() |
92dbe597ea | ||
![]() |
c23c397764 | ||
![]() |
d6fdf8af9c | ||
![]() |
3d7ba7bdf8 | ||
![]() |
523a7d4c16 | ||
![]() |
9e9832ad09 | ||
![]() |
a10e80318f | ||
![]() |
81f196647b | ||
![]() |
9f5917325b | ||
![]() |
1a536369de | ||
![]() |
5b35bea489 | ||
![]() |
bba7318a5b | ||
![]() |
3b8dc59ac5 | ||
![]() |
fc9c604b66 | ||
![]() |
d2d2967f11 | ||
![]() |
c9e0d6d6ca | ||
![]() |
b66149bce8 | ||
![]() |
8b545383e7 | ||
![]() |
ba24ad31d8 | ||
![]() |
3d081f6a59 | ||
![]() |
eac475d13f | ||
![]() |
36e969ed1f | ||
![]() |
35192531a6 | ||
![]() |
aa9161a83a | ||
![]() |
eedce00d57 | ||
![]() |
158059071b | ||
![]() |
639a834658 | ||
![]() |
95cbc544de | ||
![]() |
3294f629b0 | ||
![]() |
432aa037cb | ||
![]() |
93e598cc59 | ||
![]() |
4598b77687 | ||
![]() |
af3fe99ebb | ||
![]() |
44ce10b14b | ||
![]() |
bf384922a6 | ||
![]() |
038bf4f2c8 | ||
![]() |
34ead0aec2 | ||
![]() |
23265861c9 | ||
![]() |
e53f56ed30 | ||
![]() |
d71d8b4cf5 | ||
![]() |
e9af4a5f6b | ||
![]() |
dbdde65f52 | ||
![]() |
75a5ab9441 | ||
![]() |
2a8355dba7 | ||
![]() |
be49b6d35a | ||
![]() |
f405e702ba | ||
![]() |
11f84d7bac | ||
![]() |
59c5ad560b | ||
![]() |
3812aa6502 | ||
![]() |
21cfa653ca | ||
![]() |
c7b8ec5667 | ||
![]() |
73c69d54bc | ||
![]() |
23389dc31e | ||
![]() |
38057c2e03 | ||
![]() |
cd52a61c4c | ||
![]() |
55d76c8c5c | ||
![]() |
38c78dcba1 | ||
![]() |
768dd98a22 | ||
![]() |
6f8c24e11d | ||
![]() |
5f55d8a034 | ||
![]() |
e481a12647 | ||
![]() |
30d16a8e05 | ||
![]() |
14db5c8469 | ||
![]() |
3dd99626ff | ||
![]() |
d2b9555508 | ||
![]() |
b5c3ac74c7 | ||
![]() |
7e55c551ce | ||
![]() |
cd5cdd580a | ||
![]() |
f69edf96d5 | ||
![]() |
ab1393e5c1 | ||
![]() |
9a09115626 | ||
![]() |
3d157af915 | ||
![]() |
6c9da8aba6 | ||
![]() |
e3a4dbaab5 | ||
![]() |
a40580501a | ||
![]() |
3bdde14821 | ||
![]() |
3a089ac512 | ||
![]() |
bbf1bc163d | ||
![]() |
b63254d15d | ||
![]() |
5720f74669 | ||
![]() |
9bb0a0d91b | ||
![]() |
561bd80aa3 | ||
![]() |
1beb4fc2cb | ||
![]() |
e94fdda567 | ||
![]() |
298191a6ef | ||
![]() |
faa2c3e6ce | ||
![]() |
915dd92b5a | ||
![]() |
1f612fa1b0 | ||
![]() |
58f0b0d0a1 | ||
![]() |
6388973292 | ||
![]() |
57bf36f90a | ||
![]() |
6e65169a9d | ||
![]() |
a397253695 | ||
![]() |
b4859c7293 | ||
![]() |
1229b19a1e | ||
![]() |
083f86412b | ||
![]() |
9c1ea71d2f | ||
![]() |
7ee8eead23 | ||
![]() |
82b960c641 | ||
![]() |
7c3e470ea7 | ||
![]() |
ab1aaea7f5 | ||
![]() |
888ad2aeff | ||
![]() |
064341e1a8 | ||
![]() |
d527268cbe | ||
![]() |
699d32187e | ||
![]() |
8604c9c0d6 | ||
![]() |
644fe69fa8 | ||
![]() |
84e78859dc | ||
![]() |
8f39ed2467 | ||
![]() |
ec70c47aa0 | ||
![]() |
f83a466919 | ||
![]() |
5d0cc0b205 | ||
![]() |
8c09fd39db | ||
![]() |
9a15cd80b6 | ||
![]() |
695d257be5 | ||
![]() |
a33cdba11c | ||
![]() |
5a34448ba1 | ||
![]() |
258822d74b | ||
![]() |
a37c3231b0 | ||
![]() |
293e5edf42 | ||
![]() |
7a4472db4c | ||
![]() |
16df4da7d2 | ||
![]() |
b0566da72d | ||
![]() |
bd737f6779 | ||
![]() |
17ddbd8b32 | ||
![]() |
0e13fbf5dc | ||
![]() |
c8b949cef5 | ||
![]() |
646eba930f | ||
![]() |
aa1ee33e92 | ||
![]() |
b895ab5778 | ||
![]() |
84f1718348 | ||
![]() |
05028d3e00 | ||
![]() |
89b4fca6a2 | ||
![]() |
a4352814d1 | ||
![]() |
fc74c420d7 | ||
![]() |
f0bb70951d | ||
![]() |
9d7efade61 | ||
![]() |
382091cb1c | ||
![]() |
bef4596afa | ||
![]() |
ba23dd5132 | ||
![]() |
0d934bcbe5 | ||
![]() |
09197ef71e | ||
![]() |
f1c9eafd79 | ||
![]() |
6d222b6ed7 | ||
![]() |
045a57491a | ||
![]() |
5b0ae42d13 | ||
![]() |
d222018bb6 | ||
![]() |
61c846c6ed | ||
![]() |
33430ee67b | ||
![]() |
2353eed64c | ||
![]() |
be6d77aceb | ||
![]() |
e50333f3d8 | ||
![]() |
b5d74eec16 | ||
![]() |
6496a6125f | ||
![]() |
34c5a7ccde | ||
![]() |
60fa986c47 | ||
![]() |
04ce9087ed | ||
![]() |
b04b973b93 | ||
![]() |
c0b7377076 | ||
![]() |
ee9ca96ab3 | ||
![]() |
36371ffaaa | ||
![]() |
1364e3f316 | ||
![]() |
f3c8d5e541 | ||
![]() |
fcd4253a8d | ||
![]() |
7cc408de4c | ||
![]() |
54f9b49e90 | ||
![]() |
988f1435c5 | ||
![]() |
437a9b79a8 | ||
![]() |
ae092dc757 | ||
![]() |
99f0407ba2 | ||
![]() |
d43d6068e8 | ||
![]() |
3128b23b6f | ||
![]() |
c70d98d642 | ||
![]() |
ab19dba99d | ||
![]() |
231578b887 | ||
![]() |
09b3d6ffca | ||
![]() |
edfaba89f9 | ||
![]() |
bdd658dddb | ||
![]() |
ff681adfe9 | ||
![]() |
50708c16de | ||
![]() |
f27e58395b | ||
![]() |
ef699b2f91 | ||
![]() |
af2e2c5011 | ||
![]() |
a3bba8c14c | ||
![]() |
5af32f21fd | ||
![]() |
f694c9c346 | ||
![]() |
dc5994e18f | ||
![]() |
a692640e46 | ||
![]() |
2c4f582437 | ||
![]() |
0ccc81f46b | ||
![]() |
0ee1448e13 | ||
![]() |
ba498fc1a6 | ||
![]() |
7bc708e064 | ||
![]() |
59791eab29 | ||
![]() |
fe0eafc262 | ||
![]() |
a18c8741a4 | ||
![]() |
a880e7c797 | ||
![]() |
c1c60e0a04 | ||
![]() |
cd2aa033b4 | ||
![]() |
f241a9aef3 | ||
![]() |
77ac1ae796 | ||
![]() |
9a57b5a01d | ||
![]() |
69d919d3c4 | ||
![]() |
eea55f8f16 | ||
![]() |
3a8e20df7d | ||
![]() |
6187abe56f | ||
![]() |
6f4b896bce | ||
![]() |
16d01c961e | ||
![]() |
3e8bf598df | ||
![]() |
52044a50a1 | ||
![]() |
9a58b6fbe6 | ||
![]() |
153de04659 | ||
![]() |
f372fe371a | ||
![]() |
1cf0e0eb21 | ||
![]() |
11e09adda7 | ||
![]() |
7707c42a02 | ||
![]() |
fbfa0f3717 | ||
![]() |
7e7dddde2e | ||
![]() |
6f63303781 | ||
![]() |
5a234745fc | ||
![]() |
61b4d97832 | ||
![]() |
145d4dbd69 | ||
![]() |
28b96179a3 | ||
![]() |
cb7fb88b11 | ||
![]() |
d3fb6f515f | ||
![]() |
950638ea77 | ||
![]() |
067f95b1f6 | ||
![]() |
dee250d3b8 | ||
![]() |
ba851a684c | ||
![]() |
78a7542b4d | ||
![]() |
555c136143 | ||
![]() |
f735862977 | ||
![]() |
6ff91cd2e8 | ||
![]() |
b0ac9869ce | ||
![]() |
4d7ce1dc16 | ||
![]() |
6519e846d8 | ||
![]() |
56dbd88889 | ||
![]() |
25752c927c | ||
![]() |
ed0e858e30 | ||
![]() |
0897154584 | ||
![]() |
48db062b49 | ||
![]() |
e891a10e54 | ||
![]() |
ea3257dc09 | ||
![]() |
82e7900359 | ||
![]() |
2901f1e1ba | ||
![]() |
bfdccb809c | ||
![]() |
d0919d8ab8 | ||
![]() |
178d4f20ca | ||
![]() |
119a408f78 | ||
![]() |
2a06cec5c2 | ||
![]() |
dba42f5f5e | ||
![]() |
1d6f56c676 | ||
![]() |
08005499fb | ||
![]() |
6cbb6adcfc | ||
![]() |
58ce98e0fc | ||
![]() |
99aecedd0b | ||
![]() |
b2e1f61ff7 | ||
![]() |
aee83605ab | ||
![]() |
3f6f273fb1 | ||
![]() |
9316f75eae | ||
![]() |
7ea86a1e8a | ||
![]() |
5b821c46e5 | ||
![]() |
ea72ffad77 | ||
![]() |
2834a2d987 | ||
![]() |
1c0be2e5ce | ||
![]() |
00503b596a | ||
![]() |
f905f019a6 | ||
![]() |
779075c4a5 | ||
![]() |
36d3268cf7 | ||
![]() |
5b59da2435 | ||
![]() |
9039896247 | ||
![]() |
f9b052963a | ||
![]() |
89fb282be5 | ||
![]() |
1f2d43a8ef | ||
![]() |
bbbd42eca3 | ||
![]() |
58fa5f6e16 | ||
![]() |
b2fb1fcd3c | ||
![]() |
da58f5a89c | ||
![]() |
91f48d3d81 | ||
![]() |
ac0dc3bf11 | ||
![]() |
2f25204be9 | ||
![]() |
e389e22832 | ||
![]() |
d1aee93b59 | ||
![]() |
1dbc33445f | ||
![]() |
4885d2be79 | ||
![]() |
61cfb8aa2f | ||
![]() |
018c6f7075 | ||
![]() |
0faa6fbceb | ||
![]() |
31f108a3ed | ||
![]() |
4cea7ebcda | ||
![]() |
c11d4f2632 | ||
![]() |
687a848292 | ||
![]() |
f7eaf7b222 | ||
![]() |
03bfb64c2e | ||
![]() |
f03062910f | ||
![]() |
e57fc771bc | ||
![]() |
7d25812087 | ||
![]() |
2121612a72 | ||
![]() |
c59bc0cb3a | ||
![]() |
8edbcb7a6c | ||
![]() |
6427709dec | ||
![]() |
e296ee7ebb | ||
![]() |
7d35797749 | ||
![]() |
44a68104fb | ||
![]() |
f5f26821d5 | ||
![]() |
e9d2cdfd37 |
371 changed files with 23923 additions and 25512 deletions
40
.forgejo/workflows/build-on-commit.yml
Normal file
40
.forgejo/workflows/build-on-commit.yml
Normal file
|
@ -0,0 +1,40 @@
|
|||
name: Build Docker Image on Commit
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
tags:
|
||||
- '!' # Exclude tags
|
||||
|
||||
jobs:
|
||||
build-and-publish:
|
||||
runs-on: docker-builder
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set REPO_VARS
|
||||
id: repo-url
|
||||
run: |
|
||||
echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
|
||||
echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to OCI registry
|
||||
run: |
|
||||
echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin
|
||||
|
||||
- name: Build and push Docker images
|
||||
run: |
|
||||
# Build Docker image with commit SHA
|
||||
docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} .
|
||||
docker push $REPO_HOST/$REPO_PATH:${{ github.sha }}
|
||||
|
||||
# Build Docker image with nightly tag
|
||||
docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly
|
||||
docker push $REPO_HOST/$REPO_PATH:nightly
|
||||
|
||||
# Remove local images to save storage
|
||||
docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }}
|
||||
docker rmi $REPO_HOST/$REPO_PATH:nightly
|
37
.forgejo/workflows/build-on-tag.yml
Normal file
37
.forgejo/workflows/build-on-tag.yml
Normal file
|
@ -0,0 +1,37 @@
|
|||
name: Build and Publish Docker Image on Tag
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
build-and-publish:
|
||||
runs-on: docker-builder
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set REPO_VARS
|
||||
id: repo-url
|
||||
run: |
|
||||
echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
|
||||
echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to OCI registry
|
||||
run: |
|
||||
echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin
|
||||
|
||||
- name: Build and push Docker image
|
||||
run: |
|
||||
TAG=${{ github.ref_name }} # Get the tag name from the context
|
||||
# Build and push multi-platform Docker images
|
||||
docker build -t $REPO_HOST/$REPO_PATH:$TAG --push .
|
||||
# Tag and push latest
|
||||
docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest
|
||||
docker push $REPO_HOST/$REPO_PATH:latest
|
||||
|
||||
# Remove the local image to save storage
|
||||
docker rmi $REPO_HOST/$REPO_PATH:$TAG
|
||||
docker rmi $REPO_HOST/$REPO_PATH:latest
|
10
.github/FUNDING.yml
vendored
Normal file
10
.github/FUNDING.yml
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
github: canewsin
|
||||
patreon: # Replace with a single Patreon username e.g., user1
|
||||
open_collective: # Replace with a single Open Collective username e.g., user1
|
||||
ko_fi: canewsin
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: canewsin
|
||||
issuehunt: # Replace with a single IssueHunt username e.g., user1
|
||||
otechie: # Replace with a single Otechie username e.g., user1
|
||||
custom: ['https://paypal.me/PramUkesh', 'https://zerolink.ml/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/']
|
33
.github/ISSUE_TEMPLATE/bug-report.md
vendored
Normal file
33
.github/ISSUE_TEMPLATE/bug-report.md
vendored
Normal file
|
@ -0,0 +1,33 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve ZeroNet
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
### Step 1: Please describe your environment
|
||||
|
||||
* ZeroNet version: _____
|
||||
* Operating system: _____
|
||||
* Web browser: _____
|
||||
* Tor status: not available/always/disabled
|
||||
* Opened port: yes/no
|
||||
* Special configuration: ____
|
||||
|
||||
### Step 2: Describe the problem:
|
||||
|
||||
#### Steps to reproduce:
|
||||
|
||||
1. _____
|
||||
2. _____
|
||||
3. _____
|
||||
|
||||
#### Observed Results:
|
||||
|
||||
* What happened? This could be a screenshot, a description, log output (you can send log/debug.log file to hello@zeronet.io if necessary), etc.
|
||||
|
||||
#### Expected Results:
|
||||
|
||||
* What did you expect to happen?
|
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for ZeroNet
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
72
.github/workflows/codeql-analysis.yml
vendored
Normal file
72
.github/workflows/codeql-analysis.yml
vendored
Normal file
|
@ -0,0 +1,72 @@
|
|||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ py3-latest ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ py3-latest ]
|
||||
schedule:
|
||||
- cron: '32 19 * * 2'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript', 'python' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
51
.github/workflows/tests.yml
vendored
Normal file
51
.github/workflows/tests.yml
vendored
Normal file
|
@ -0,0 +1,51 @@
|
|||
name: tests
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-20.04
|
||||
strategy:
|
||||
max-parallel: 16
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9"]
|
||||
|
||||
steps:
|
||||
- name: Checkout ZeroNet
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: "true"
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Prepare for installation
|
||||
run: |
|
||||
python3 -m pip install setuptools
|
||||
python3 -m pip install --upgrade pip wheel
|
||||
python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium
|
||||
|
||||
- name: Install
|
||||
run: |
|
||||
python3 -m pip install --upgrade -r requirements.txt
|
||||
python3 -m pip list
|
||||
|
||||
- name: Prepare for tests
|
||||
run: |
|
||||
openssl version -a
|
||||
echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini
|
||||
export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test
|
||||
export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test
|
||||
export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test
|
||||
export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test
|
||||
export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test
|
||||
export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test
|
||||
find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
|
||||
find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
|
||||
flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/
|
23
.gitignore
vendored
23
.gitignore
vendored
|
@ -3,13 +3,34 @@ __pycache__/
|
|||
*.py[cod]
|
||||
|
||||
# Log files
|
||||
*.log
|
||||
**/*.log
|
||||
|
||||
# Hidden files
|
||||
.*
|
||||
!/.forgejo
|
||||
!/.github
|
||||
!/.gitignore
|
||||
!/.travis.yml
|
||||
!/.gitlab-ci.yml
|
||||
|
||||
# Temporary files
|
||||
*.bak
|
||||
|
||||
# Data dir
|
||||
data/*
|
||||
*.db
|
||||
|
||||
# Virtualenv
|
||||
env/*
|
||||
|
||||
# Tor data
|
||||
tools/tor/data
|
||||
|
||||
# PhantomJS, downloaded manually for unit tests
|
||||
tools/phantomjs
|
||||
|
||||
# ZeroNet config file
|
||||
zeronet.conf
|
||||
|
||||
# ZeroNet log files
|
||||
log/*
|
||||
|
|
48
.gitlab-ci.yml
Normal file
48
.gitlab-ci.yml
Normal file
|
@ -0,0 +1,48 @@
|
|||
stages:
|
||||
- test
|
||||
|
||||
.test_template: &test_template
|
||||
stage: test
|
||||
before_script:
|
||||
- pip install --upgrade pip wheel
|
||||
# Selenium and requests can't be installed without a requests hint on Python 3.4
|
||||
- pip install --upgrade requests>=2.22.0
|
||||
- pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium
|
||||
- pip install --upgrade -r requirements.txt
|
||||
script:
|
||||
- pip list
|
||||
- openssl version -a
|
||||
- python -m pytest -x plugins/CryptMessage/Test --color=yes
|
||||
- python -m pytest -x plugins/Bigfile/Test --color=yes
|
||||
- python -m pytest -x plugins/AnnounceLocal/Test --color=yes
|
||||
- python -m pytest -x plugins/OptionalManager/Test --color=yes
|
||||
- python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini --color=yes
|
||||
- mv plugins/disabled-Multiuser plugins/Multiuser
|
||||
- python -m pytest -x plugins/Multiuser/Test --color=yes
|
||||
- mv plugins/disabled-Bootstrapper plugins/Bootstrapper
|
||||
- python -m pytest -x plugins/Bootstrapper/Test --color=yes
|
||||
- flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/
|
||||
|
||||
test:py3.4:
|
||||
image: python:3.4.3
|
||||
<<: *test_template
|
||||
|
||||
test:py3.5:
|
||||
image: python:3.5.7
|
||||
<<: *test_template
|
||||
|
||||
test:py3.6:
|
||||
image: python:3.6.9
|
||||
<<: *test_template
|
||||
|
||||
test:py3.7-openssl1.1.0:
|
||||
image: python:3.7.0b5
|
||||
<<: *test_template
|
||||
|
||||
test:py3.7-openssl1.1.1:
|
||||
image: python:3.7.4
|
||||
<<: *test_template
|
||||
|
||||
test:py3.8:
|
||||
image: python:3.8.0b3
|
||||
<<: *test_template
|
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
[submodule "plugins"]
|
||||
path = plugins
|
||||
url = https://github.com/ZeroNetX/ZeroNet-Plugins.git
|
47
.travis.yml
47
.travis.yml
|
@ -1,18 +1,47 @@
|
|||
language: python
|
||||
cache: pip
|
||||
python:
|
||||
- 2.7
|
||||
- 3.4
|
||||
- 3.5
|
||||
- 3.6
|
||||
- 3.7
|
||||
- 3.8
|
||||
services:
|
||||
- docker
|
||||
cache: pip
|
||||
before_install:
|
||||
- pip install --upgrade pip wheel
|
||||
- pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium
|
||||
# - docker build -t zeronet .
|
||||
# - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet
|
||||
install:
|
||||
- pip install -r requirements.txt
|
||||
- pip install --upgrade -r requirements.txt
|
||||
- pip list
|
||||
before_script:
|
||||
- openssl version -a
|
||||
# Add an IPv6 config - see the corresponding Travis issue
|
||||
# https://github.com/travis-ci/travis-ci/issues/8361
|
||||
- if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
|
||||
sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6';
|
||||
fi
|
||||
script:
|
||||
- python -m pytest plugins/CryptMessage/Test
|
||||
- python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini
|
||||
before_install:
|
||||
- pip install -U pytest mock pytest-cov
|
||||
- pip install codecov
|
||||
- pip install coveralls
|
||||
- catchsegv python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini
|
||||
- export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python -m pytest -x plugins/CryptMessage/Test
|
||||
- export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python -m pytest -x plugins/Bigfile/Test
|
||||
- export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python -m pytest -x plugins/AnnounceLocal/Test
|
||||
- export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python -m pytest -x plugins/OptionalManager/Test
|
||||
- export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test
|
||||
- export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test
|
||||
- find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
|
||||
- find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
|
||||
- flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/
|
||||
after_failure:
|
||||
- zip -r log.zip log/
|
||||
- curl --upload-file ./log.zip https://transfer.sh/log.zip
|
||||
after_success:
|
||||
- codecov
|
||||
- coveralls --rcfile=src/Test/coverage.ini
|
||||
notifications:
|
||||
email:
|
||||
recipients:
|
||||
hello@zeronet.io
|
||||
on_success: change
|
||||
|
|
649
CHANGELOG.md
Normal file
649
CHANGELOG.md
Normal file
|
@ -0,0 +1,649 @@
|
|||
### ZeroNet 0.9.0 (2023-07-12) Rev4630
|
||||
- Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9
|
||||
- Add trackers to Config.py for failsafety incase missing trackers.txt
|
||||
- Added Proxy links
|
||||
- Fix pysha3 dep installation issue
|
||||
- FileRequest -> Remove Unnecessary check, Fix error wording
|
||||
- Fix Response when site is missing for `actionAs`
|
||||
|
||||
|
||||
### ZeroNet 0.8.5 (2023-02-12) Rev4625
|
||||
- Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows.
|
||||
- default theme-class for missing value in `users.json`.
|
||||
- Fetch Stats Plugin changes.
|
||||
|
||||
### ZeroNet 0.8.4 (2022-12-12) Rev4620
|
||||
- Increase Minimum Site size to 25MB.
|
||||
|
||||
### ZeroNet 0.8.3 (2022-12-11) Rev4611
|
||||
- main.py -> Fix accessing unassigned varible
|
||||
- ContentManager -> Support for multiSig
|
||||
- SiteStrorage.py -> Fix accessing unassigned varible
|
||||
- ContentManager.py Improve Logging of Valid Signers
|
||||
|
||||
### ZeroNet 0.8.2 (2022-11-01) Rev4610
|
||||
- Fix Startup Error when plugins dir missing
|
||||
- Move trackers to seperate file & Add more trackers
|
||||
- Config:: Skip loading missing tracker files
|
||||
- Added documentation for getRandomPort fn
|
||||
|
||||
### ZeroNet 0.8.1 (2022-10-01) Rev4600
|
||||
- fix readdress loop (cherry-pick previously added commit from conservancy)
|
||||
- Remove Patreon badge
|
||||
- Update README-ru.md (#177)
|
||||
- Include inner_path of failed request for signing in error msg and response
|
||||
- Don't Fail Silently When Cert is Not Selected
|
||||
- Console Log Updates, Specify min supported ZeroNet version for Rust version Protocol Compatibility
|
||||
- Update FUNDING.yml
|
||||
|
||||
### ZeroNet 0.8.0 (2022-05-27) Rev4591
|
||||
- Revert File Open to catch File Access Errors.
|
||||
|
||||
### ZeroNet 0.7.9-patch (2022-05-26) Rev4586
|
||||
- Use xescape(s) from zeronet-conservancy
|
||||
- actionUpdate response Optimisation
|
||||
- Fetch Plugins Repo Updates
|
||||
- Fix Unhandled File Access Errors
|
||||
- Create codeql-analysis.yml
|
||||
|
||||
### ZeroNet 0.7.9 (2022-05-26) Rev4585
|
||||
- Rust Version Compatibility for update Protocol msg
|
||||
- Removed Non Working Trakers.
|
||||
- Dynamically Load Trackers from Dashboard Site.
|
||||
- Tracker Supply Improvements.
|
||||
- Fix Repo Url for Bug Report
|
||||
- First Party Tracker Update Service using Dashboard Site.
|
||||
- remove old v2 onion service [#158](https://github.com/ZeroNetX/ZeroNet/pull/158)
|
||||
|
||||
### ZeroNet 0.7.8 (2022-03-02) Rev4580
|
||||
- Update Plugins with some bug fixes and Improvements
|
||||
|
||||
### ZeroNet 0.7.6 (2022-01-12) Rev4565
|
||||
- Sync Plugin Updates
|
||||
- Clean up tor v3 patch [#115](https://github.com/ZeroNetX/ZeroNet/pull/115)
|
||||
- Add More Default Plugins to Repo
|
||||
- Doubled Site Publish Limits
|
||||
- Update ZeroNet Repo Urls [#103](https://github.com/ZeroNetX/ZeroNet/pull/103)
|
||||
- UI/UX: Increases Size of Notifications Close Button [#106](https://github.com/ZeroNetX/ZeroNet/pull/106)
|
||||
- Moved Plugins to Seperate Repo
|
||||
- Added `access_key` variable in Config, this used to access restrited plugins when multiuser plugin is enabled. When MultiUserPlugin is enabled we cannot access some pages like /Stats, this key will remove such restriction with access key.
|
||||
- Added `last_connection_id_current_version` to ConnectionServer, helpful to estimate no of connection from current client version.
|
||||
- Added current version: connections to /Stats page. see the previous point.
|
||||
|
||||
### ZeroNet 0.7.5 (2021-11-28) Rev4560
|
||||
- Add more default trackers
|
||||
- Change default homepage address to `1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`
|
||||
- Change default update site address to `1Update8crprmciJHwp2WXqkx2c4iYp18`
|
||||
|
||||
### ZeroNet 0.7.3 (2021-11-28) Rev4555
|
||||
- Fix xrange is undefined error
|
||||
- Fix Incorrect viewport on mobile while loading
|
||||
- Tor-V3 Patch by anonymoose
|
||||
|
||||
|
||||
### ZeroNet 0.7.1 (2019-07-01) Rev4206
|
||||
### Added
|
||||
- Built-in logging console in the web UI to see what's happening in the background. (pull down top-right 0 button to see it)
|
||||
- Display database rebuild errors [Thanks to Lola]
|
||||
- New plugin system that allows to install and manage builtin/third party extensions to the ZeroNet client using the web interface.
|
||||
- Support multiple trackers_file
|
||||
- Add OpenSSL 1.1 support to CryptMessage plugin based on Bitmessage modifications [Thanks to radfish]
|
||||
- Display visual error message on startup errors
|
||||
- Fix max opened files changing on Windows platform
|
||||
- Display TLS1.3 compatibility on /Stats page
|
||||
- Add fake SNI and ALPN to peer connections to make it more like standard https connections
|
||||
- Hide and ignore tracker_proxy setting in Tor: Always mode as it's going to use Tor anyway.
|
||||
- Deny websocket connections from unknown origins
|
||||
- Restrict open_browser values to avoid RCE on sandbox escape
|
||||
- Offer access web interface by IP address in case of unknown host
|
||||
- Link to site's sidebar with "#ZeroNet:OpenSidebar" hash
|
||||
|
||||
### Changed
|
||||
- Allow .. in file names [Thanks to imachug]
|
||||
- Change unstable trackers
|
||||
- More clean errors on sites.json/users.json load error
|
||||
- Various tweaks for tracker rating on unstable connections
|
||||
- Use OpenSSL 1.1 dlls from default Python Windows distribution if possible
|
||||
- Re-factor domain resolving for easier domain plugins
|
||||
- Disable UDP connections if --proxy is used
|
||||
- New, decorator-based Websocket API permission system to avoid future typo mistakes
|
||||
|
||||
### Fixed
|
||||
- Fix parsing config lines that have no value
|
||||
- Fix start.py [Thanks to imachug]
|
||||
- Allow multiple values of the same key in the config file [Thanks ssdifnskdjfnsdjk for reporting]
|
||||
- Fix parsing config file lines that has % in the value [Thanks slrslr for reporting]
|
||||
- Fix bootstrapper plugin hash reloads [Thanks geekless for reporting]
|
||||
- Fix CryptMessage plugin OpenSSL dll loading on Windows (ZeroMail errors) [Thanks cxgreat2014 for reporting]
|
||||
- Fix startup error when using OpenSSL 1.1 [Thanks to imachug]
|
||||
- Fix a bug that did not loaded merged site data for 5 sec after the merged site got added
|
||||
- Fix typo that allowed to add new plugins in public proxy mode. [Thanks styromaniac for reporting]
|
||||
- Fix loading non-big files with "|all" postfix [Thanks to krzotr]
|
||||
- Fix OpenSSL cert generation error crash by change Windows console encoding to utf8
|
||||
|
||||
#### Wrapper html injection vulnerability [Reported by ivanq]
|
||||
|
||||
In ZeroNet before rev4188 the wrapper template variables was rendered incorrectly.
|
||||
|
||||
Result: The opened site was able to gain WebSocket connection with unrestricted ADMIN/NOSANDBOX access, change configuration values and possible RCE on client's machine.
|
||||
|
||||
Fix: Fixed the template rendering code, disallowed WebSocket connections from unknown locations, restricted open_browser configuration values to avoid possible RCE in case of sandbox escape.
|
||||
|
||||
Note: The fix is also back ported to ZeroNet Py 2.x version (Rev3870)
|
||||
|
||||
|
||||
### ZeroNet 0.7.0 (2019-06-12) Rev4106 (First release targeting Python 3.4+)
|
||||
### Added
|
||||
- 5-10x faster signature verification by using libsecp256k1 (Thanks to ZeroMux)
|
||||
- Generated SSL certificate randomization to avoid protocol filters (Thanks to ValdikSS)
|
||||
- Offline mode
|
||||
- P2P source code update using ZeroNet protocol
|
||||
- ecdsaSign/Verify commands to CryptMessage plugin (Thanks to imachug)
|
||||
- Efficient file rename: change file names instead of re-downloading the file.
|
||||
- Make redirect optional on site cloning (Thanks to Lola)
|
||||
- EccPrivToPub / EccPubToPriv functions (Thanks to imachug)
|
||||
- Detect and change dark/light theme based on OS setting (Thanks to filips123)
|
||||
|
||||
### Changed
|
||||
- Re-factored code to Python3 runtime (compatible with Python 3.4-3.8)
|
||||
- More safe database sync mode
|
||||
- Removed bundled third-party libraries where it's possible
|
||||
- Use lang=en instead of lang={lang} in urls to avoid url encode problems
|
||||
- Remove environment details from error page
|
||||
- Don't push content.json updates larger than 10kb to significantly reduce bw usage for site with many files
|
||||
|
||||
### Fixed
|
||||
- Fix sending files with \0 characters
|
||||
- Security fix: Escape error detail to avoid XSS (reported by krzotr)
|
||||
- Fix signature verification using libsecp256k1 for compressed addresses (mostly certificates generated in the browser)
|
||||
- Fix newsfeed if you have more than 1000 followed topic/post on one site.
|
||||
- Fix site download as zip file
|
||||
- Fix displaying sites with utf8 title
|
||||
- Error message if dbRebuild fails (Thanks to Lola)
|
||||
- Fix browser reopen if executing start.py again. (Thanks to imachug)
|
||||
|
||||
|
||||
### ZeroNet 0.6.5 (2019-02-16) Rev3851 (Last release targeting Python 2.7.x)
|
||||
### Added
|
||||
- IPv6 support in peer exchange, bigfiles, optional file finding, tracker sharing, socket listening and connecting (based on tangdou1 modifications)
|
||||
- New tracker database format with IPv6 support
|
||||
- Display notification if there is an unpublished modification for your site
|
||||
- Listen and shut down normally for SIGTERM (Thanks to blurHY)
|
||||
- Support tilde `~` in filenames (by d14na)
|
||||
- Support map for Namecoin subdomain names (Thanks to lola)
|
||||
- Add log level to config page
|
||||
- Support `{data}` for data dir variable in trackers_file value
|
||||
- Quick check content.db on startup and rebuild if necessary
|
||||
- Don't show meek proxy option if the tor client does not supports it
|
||||
|
||||
### Changed
|
||||
- Refactored port open checking with IPv6 support
|
||||
- Consider non-local IPs as external even is the open port check fails (for CJDNS and Yggdrasil support)
|
||||
- Add IPv6 tracker and change unstable tracker
|
||||
- Don't correct sent local time with the calculated time correction
|
||||
- Disable CSP for Edge
|
||||
- Only support CREATE commands in dbschema indexes node and SELECT from storage.query
|
||||
|
||||
### Fixed
|
||||
- Check the length of master seed when executing cryptGetPrivatekey CLI command
|
||||
- Only reload source code on file modification / creation
|
||||
- Detection and issue warning for latest no-script plugin
|
||||
- Fix atomic write of a non-existent file
|
||||
- Fix sql queries with lots of variables and sites with lots of content.json
|
||||
- Fix multi-line parsing of zeronet.conf
|
||||
- Fix site deletion from users.json
|
||||
- Fix site cloning before site downloaded (Reported by unsystemizer)
|
||||
- Fix queryJson for non-list nodes (Reported by MingchenZhang)
|
||||
|
||||
|
||||
## ZeroNet 0.6.4 (2018-10-20) Rev3660
|
||||
### Added
|
||||
- New plugin: UiConfig. A web interface that allows changing ZeroNet settings.
|
||||
- New plugin: AnnounceShare. Share trackers between users, automatically announce client's ip as tracker if Bootstrapper plugin is enabled.
|
||||
- Global tracker stats on ZeroHello: Include statistics from all served sites instead of displaying request statistics only for one site.
|
||||
- Support custom proxy for trackers. (Configurable with /Config)
|
||||
- Adding peers to sites manually using zeronet_peers get parameter
|
||||
- Copy site address with peers link on the sidebar.
|
||||
- Zip file listing and streaming support for Bigfiles.
|
||||
- Tracker statistics on /Stats page
|
||||
- Peer reputation save/restore to speed up sync time after startup.
|
||||
- Full support fileGet, fileList, dirList calls on tar.gz/zip files.
|
||||
- Archived_before support to user content rules to allow deletion of all user files before the specified date
|
||||
- Show and manage "Connecting" sites on ZeroHello
|
||||
- Add theme support to ZeroNet sites
|
||||
- Dark theme for ZeroHello, ZeroBlog, ZeroTalk
|
||||
|
||||
### Changed
|
||||
- Dynamic big file allocation: More efficient storage usage by don't pre-allocate the whole file at the beginning, but expand the size as the content downloads.
|
||||
- Reduce the request frequency to unreliable trackers.
|
||||
- Only allow 5 concurrent checkSites to run in parallel to reduce load under Tor/slow connection.
|
||||
- Stop site downloading if it reached 95% of site limit to avoid download loop for sites out of limit
|
||||
- The pinned optional files won't be removed from download queue after 30 retries and won't be deleted even if the site owner removes it.
|
||||
- Don't remove incomplete (downloading) sites on startup
|
||||
- Remove --pin_bigfile argument as big files are automatically excluded from optional files limit.
|
||||
|
||||
### Fixed
|
||||
- Trayicon compatibility with latest gevent
|
||||
- Request number counting for zero:// trackers
|
||||
- Peer reputation boost for zero:// trackers.
|
||||
- Blocklist of peers loaded from peerdb (Thanks tangdou1 for report)
|
||||
- Sidebar map loading on foreign languages (Thx tangdou1 for report)
|
||||
- FileGet on non-existent files (Thanks mcdev for reporting)
|
||||
- Peer connecting bug for sites with low amount of peers
|
||||
|
||||
#### "The Vacation" Sandbox escape bug [Reported by GitCenter / Krixano / ZeroLSTN]
|
||||
|
||||
In ZeroNet 0.6.3 Rev3615 and earlier as a result of invalid file type detection, a malicious site could escape the iframe sandbox.
|
||||
|
||||
Result: Browser iframe sandbox escape
|
||||
|
||||
Applied fix: Replaced the previous, file extension based file type identification with a proper one.
|
||||
|
||||
Affected versions: All versions before ZeroNet Rev3616
|
||||
|
||||
|
||||
## ZeroNet 0.6.3 (2018-06-26)
|
||||
### Added
|
||||
- New plugin: ContentFilter that allows to have shared site and user block list.
|
||||
- Support Tor meek proxies to avoid tracker blocking of GFW
|
||||
- Detect network level tracker blocking and easy setting meek proxy for tracker connections.
|
||||
- Support downloading 2GB+ sites as .zip (Thx to Radtoo)
|
||||
- Support ZeroNet as a transparent proxy (Thx to JeremyRand)
|
||||
- Allow fileQuery as CORS command (Thx to imachug)
|
||||
- Windows distribution includes Tor and meek client by default
|
||||
- Download sites as zip link to sidebar
|
||||
- File server port randomization
|
||||
- Implicit SSL for all connection
|
||||
- fileList API command for zip files
|
||||
- Auto download bigfiles size limit on sidebar
|
||||
- Local peer number to the sidebar
|
||||
- Open site directory button in sidebar
|
||||
|
||||
### Changed
|
||||
- Switched to Azure Tor meek proxy as Amazon one became unavailable
|
||||
- Refactored/rewritten tracker connection manager
|
||||
- Improved peer discovery for optional files without opened port
|
||||
- Also delete Bigfile's piecemap on deletion
|
||||
|
||||
### Fixed
|
||||
- Important security issue: Iframe sandbox escape [Reported by Ivanq / gitcenter]
|
||||
- Local peer discovery when running multiple clients on the same machine
|
||||
- Uploading small files with Bigfile plugin
|
||||
- Ctrl-c shutdown when running CLI commands
|
||||
- High CPU/IO usage when Multiuser plugin enabled
|
||||
- Firefox back button
|
||||
- Peer discovery on older Linux kernels
|
||||
- Optional file handling when multiple files have the same hash_id (first 4 chars of the hash)
|
||||
- Msgpack 0.5.5 and 0.5.6 compatibility
|
||||
|
||||
## ZeroNet 0.6.2 (2018-02-18)
|
||||
|
||||
### Added
|
||||
- New plugin: AnnounceLocal to make ZeroNet work without an internet connection on the local network.
|
||||
- Allow dbQuey and userGetSettings using the `as` API command on different sites with Cors permission
|
||||
- New config option: `--log_level` to reduce log verbosity and IO load
|
||||
- Prefer to connect to recent peers from trackers first
|
||||
- Mark peers with port 1 is also unconnectable for future fix for trackers that do not support port 0 announce
|
||||
|
||||
### Changed
|
||||
- Don't keep connection for sites that have not been modified in the last week
|
||||
- Change unreliable trackers to new ones
|
||||
- Send maximum 10 findhash request in one find optional files round (15sec)
|
||||
- Change "Unique to site" to "No certificate" for default option in cert selection dialog.
|
||||
- Dont print warnings if not in debug mode
|
||||
- Generalized tracker logging format
|
||||
- Only recover sites from sites.json if they had peers
|
||||
- Message from local peers does not means internet connection
|
||||
- Removed `--debug_gevent` and turned on Gevent block logging by default
|
||||
|
||||
### Fixed
|
||||
- Limit connections to 512 to avoid reaching 1024 limit on windows
|
||||
- Exception when logging foreign operating system socket errors
|
||||
- Don't send private (local) IPs on pex
|
||||
- Don't connect to private IPs in tor always mode
|
||||
- Properly recover data from msgpack unpacker on file stream start
|
||||
- Symlinked data directory deletion when deleting site using Windows
|
||||
- De-duplicate peers before publishing
|
||||
- Bigfile info for non-existing files
|
||||
|
||||
|
||||
## ZeroNet 0.6.1 (2018-01-25)
|
||||
|
||||
### Added
|
||||
- New plugin: Chart
|
||||
- Collect and display charts about your contribution to ZeroNet network
|
||||
- Allow list as argument replacement in sql queries. (Thanks to imachug)
|
||||
- Newsfeed query time statistics (Click on "From XX sites in X.Xs on ZeroHello)
|
||||
- New UiWebsocket API command: As to run commands as other site
|
||||
- Ranged ajax queries for big files
|
||||
- Filter feed by type and site address
|
||||
- FileNeed, Bigfile upload command compatibility with merger sites
|
||||
- Send event on port open / tor status change
|
||||
- More description on permission request
|
||||
|
||||
### Changed
|
||||
- Reduce memory usage of sidebar geoip database cache
|
||||
- Change unreliable tracker to new one
|
||||
- Don't display Cors permission ask if it already granted
|
||||
- Avoid UI blocking when rebuilding a merger site
|
||||
- Skip listing ignored directories on signing
|
||||
- In Multiuser mode show the seed welcome message when adding new certificate instead of first visit
|
||||
- Faster async port opening on multiple network interfaces
|
||||
- Allow javascript modals
|
||||
- Only zoom sidebar globe if mouse button is pressed down
|
||||
|
||||
### Fixed
|
||||
- Open port checking error reporting (Thanks to imachug)
|
||||
- Out-of-range big file requests
|
||||
- Don't output errors happened on gevent greenlets twice
|
||||
- Newsfeed skip sites with no database
|
||||
- Newsfeed queries with multiple params
|
||||
- Newsfeed queries with UNION and UNION ALL
|
||||
- Fix site clone with sites larger that 10MB
|
||||
- Unreliable Websocket connection when requesting files from different sites at the same time
|
||||
|
||||
|
||||
## ZeroNet 0.6.0 (2017-10-17)
|
||||
|
||||
### Added
|
||||
- New plugin: Big file support
|
||||
- Automatic pinning on Big file download
|
||||
- Enable TCP_NODELAY for supporting sockets
|
||||
- actionOptionalFileList API command arguments to list non-downloaded files or only big files
|
||||
- serverShowdirectory API command arguments to allow to display site's directory in OS file browser
|
||||
- fileNeed API command to initialize optional file downloading
|
||||
- wrapperGetAjaxKey API command to request nonce for AJAX request
|
||||
- Json.gz support for database files
|
||||
- P2P port checking (Thanks to grez911)
|
||||
- `--download_optional auto` argument to enable automatic optional file downloading for newly added site
|
||||
- Statistics for big files and protocol command requests on /Stats
|
||||
- Allow to set user limitation based on auth_address
|
||||
|
||||
### Changed
|
||||
- More aggressive and frequent connection timeout checking
|
||||
- Use out of msgpack context file streaming for files larger than 512KB
|
||||
- Allow optional files workers over the worker limit
|
||||
- Automatic redirection to wrapper on nonce_error
|
||||
- Send websocket event on optional file deletion
|
||||
- Optimize sites.json saving
|
||||
- Enable faster C-based msgpack packer by default
|
||||
- Major optimization on Bootstrapper plugin SQL queries
|
||||
- Don't reset bad file counter on restart, to allow easier give up on unreachable files
|
||||
- Incoming connection limit changed from 1000 to 500 to avoid reaching socket limit on Windows
|
||||
- Changed tracker boot.zeronet.io domain, because zeronet.io got banned in some countries
|
||||
|
||||
#### Fixed
|
||||
- Sub-directories in user directories
|
||||
|
||||
## ZeroNet 0.5.7 (2017-07-19)
|
||||
### Added
|
||||
- New plugin: CORS to request read permission to other site's content
|
||||
- New API command: userSetSettings/userGetSettings to store site's settings in users.json
|
||||
- Avoid file download if the file size does not match with the requested one
|
||||
- JavaScript and wrapper less file access using /raw/ prefix ([Example](http://127.0.0.1:43110/raw/1AsRLpuRxr3pb9p3TKoMXPSWHzh6i7fMGi/en.tar.gz/index.html))
|
||||
- --silent command line option to disable logging to stdout
|
||||
|
||||
|
||||
### Changed
|
||||
- Better error reporting on sign/verification errors
|
||||
- More test for sign and verification process
|
||||
- Update to OpenSSL v1.0.2l
|
||||
- Limit compressed files to 6MB to avoid zip/tar.gz bomb
|
||||
- Allow space, [], () characters in filenames
|
||||
- Disable cross-site resource loading to improve privacy. [Reported by Beardog108]
|
||||
- Download directly accessed Pdf/Svg/Swf files instead of displaying them to avoid wrapper escape using in JS in SVG file. [Reported by Beardog108]
|
||||
- Disallow potentially unsafe regular expressions to avoid ReDoS [Reported by MuxZeroNet]
|
||||
|
||||
### Fixed
|
||||
- Detecting data directory when running Windows distribution exe [Reported by Plasmmer]
|
||||
- OpenSSL loading under Android 6+
|
||||
- Error on exiting when no connection server started
|
||||
|
||||
|
||||
## ZeroNet 0.5.6 (2017-06-15)
|
||||
### Added
|
||||
- Callback for certSelect API command
|
||||
- More compact list formatting in json
|
||||
|
||||
### Changed
|
||||
- Remove obsolete auth_key_sha512 and signature format
|
||||
- Improved Spanish translation (Thanks to Pupiloho)
|
||||
|
||||
### Fixed
|
||||
- Opened port checking (Thanks l5h5t7 & saber28 for reporting)
|
||||
- Standalone update.py argument parsing (Thanks Zalex for reporting)
|
||||
- uPnP crash on startup (Thanks Vertux for reporting)
|
||||
- CoffeeScript 1.12.6 compatibility (Thanks kavamaken & imachug)
|
||||
- Multi value argument parsing
|
||||
- Database error when running from directory that contains special characters (Thanks Pupiloho for reporting)
|
||||
- Site lock violation logging
|
||||
|
||||
|
||||
#### Proxy bypass during source upgrade [Reported by ZeroMux]
|
||||
|
||||
In ZeroNet before 0.5.6 during the client's built-in source code upgrade mechanism,
|
||||
ZeroNet did not respect Tor and/or proxy settings.
|
||||
|
||||
Result: ZeroNet downloaded the update without using the Tor network and potentially leaked the connections.
|
||||
|
||||
Fix: Removed the problematic code line from the updater that removed the proxy settings from the socket library.
|
||||
|
||||
Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6
|
||||
|
||||
|
||||
#### XSS vulnerability using DNS rebinding. [Reported by Beardog108]
|
||||
|
||||
In ZeroNet before 0.5.6 the web interface did not validate the request's Host parameter.
|
||||
|
||||
Result: An attacker using a specially crafted DNS entry could have bypassed the browser's cross-site-scripting protection
|
||||
and potentially gained access to user's private data stored on site.
|
||||
|
||||
Fix: By default ZeroNet only accepts connections from 127.0.0.1 and localhost hosts.
|
||||
If you bind the ui server to an external interface, then it also adds the first http request's host to the allowed host list
|
||||
or you can define it manually using --ui_host.
|
||||
|
||||
Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6
|
||||
|
||||
|
||||
## ZeroNet 0.5.5 (2017-05-18)
|
||||
### Added
|
||||
- Outgoing socket binding by --bind parameter
|
||||
- Database rebuilding progress bar
|
||||
- Protect low traffic site's peers from cleanup closing
|
||||
- Local site blacklisting
|
||||
- Cloned site source code upgrade from parent
|
||||
- Input placeholder support for displayPrompt
|
||||
- Alternative interaction for wrapperConfirm
|
||||
|
||||
### Changed
|
||||
- New file priorities for faster site display on first visit
|
||||
- Don't add ? to url if push/replaceState url starts with #
|
||||
|
||||
### Fixed
|
||||
- PermissionAdd/Remove admin command requirement
|
||||
- Multi-line confirmation dialog
|
||||
|
||||
|
||||
## ZeroNet 0.5.4 (2017-04-14)
|
||||
### Added
|
||||
- Major speed and CPU usage enhancements in Tor always mode
|
||||
- Send skipped modifications to outdated clients
|
||||
|
||||
### Changed
|
||||
- Upgrade libs to latest version
|
||||
- Faster port opening and closing
|
||||
- Deny site limit modification in MultiUser mode
|
||||
|
||||
### Fixed
|
||||
- Filling database from optional files
|
||||
- OpenSSL detection on systems with OpenSSL 1.1
|
||||
- Users.json corruption on systems with slow hdd
|
||||
- Fix leaking files in data directory by webui
|
||||
|
||||
|
||||
## ZeroNet 0.5.3 (2017-02-27)
|
||||
### Added
|
||||
- Tar.gz/zip packed site support
|
||||
- Utf8 filenames in archive files
|
||||
- Experimental --db_mode secure database mode to prevent data loss on systems with unreliable power source.
|
||||
- Admin user support in MultiUser mode
|
||||
- Optional deny adding new sites in MultiUser mode
|
||||
|
||||
### Changed
|
||||
- Faster update and publish times by new socket sharing algorithm
|
||||
|
||||
### Fixed
|
||||
- Fix missing json_row errors when using Mute plugin
|
||||
|
||||
|
||||
## ZeroNet 0.5.2 (2017-02-09)
|
||||
### Added
|
||||
- User muting
|
||||
- Win/Mac signed exe/.app
|
||||
- Signed commits
|
||||
|
||||
### Changed
|
||||
- Faster site updates after startup
|
||||
- New macOS package for 10.10 compatibility
|
||||
|
||||
### Fixed
|
||||
- Fix "New version just released" popup on page first visit
|
||||
- Fix disappearing optional files bug (Thanks l5h5t7 for reporting)
|
||||
- Fix skipped updates on unreliable connections (Thanks P2P for reporting)
|
||||
- Sandbox escape security fix (Thanks Firebox for reporting)
|
||||
- Fix error reporting on async websocket functions
|
||||
|
||||
|
||||
## ZeroNet 0.5.1 (2016-11-18)
|
||||
### Added
|
||||
- Multi language interface
|
||||
- New plugin: Translation helper for site html and js files
|
||||
- Per-site favicon
|
||||
|
||||
### Fixed
|
||||
- Parallel optional file downloading
|
||||
|
||||
|
||||
## ZeroNet 0.5.0 (2016-11-08)
|
||||
### Added
|
||||
- New Plugin: Allow list/delete/pin/manage files on ZeroHello
|
||||
- New API commands to follow user's optional files, and query stats for optional files
|
||||
- Set total size limit on optional files.
|
||||
- New Plugin: Save peers to database and keep them between restarts to allow faster optional file search and make it work without trackers
|
||||
- Rewritten uPnP port opener + close port on exit (Thanks to sirMackk!)
|
||||
- Lower memory usage by lazy PeerHashfield creation
|
||||
- Loaded json files statistics and database info at /Stats page
|
||||
|
||||
### Changed
|
||||
- Separate lock file for better Windows compatibility
|
||||
- When executing start.py open browser even if ZeroNet is already running
|
||||
- Keep plugin order after reload to allow plugins to extend another plugin
|
||||
- Only save sites.json if fully loaded to avoid data loss
|
||||
- Change aletorrenty tracker to a more reliable one
|
||||
- Much lower findhashid CPU usage
|
||||
- Pooled downloading of large amount of optional files
|
||||
- Lots of other optional file changes to make it better
|
||||
- If we have 1000 peers for a site make cleanup more aggressive
|
||||
- Use warning instead of error on verification errors
|
||||
- Push updates to newer clients first
|
||||
- Bad file reset improvements
|
||||
|
||||
### Fixed
|
||||
- Fix site deletion errors on startup
|
||||
- Delay websocket messages until it's connected
|
||||
- Fix database import if data file contains extra data
|
||||
- Fix big site download
|
||||
- Fix diff sending bug (been chasing it for a long time)
|
||||
- Fix random publish errors when json file contained [] characters
|
||||
- Fix site delete and siteCreate bug
|
||||
- Fix file write confirmation dialog
|
||||
|
||||
|
||||
## ZeroNet 0.4.1 (2016-09-05)
|
||||
### Added
|
||||
- Major core changes to allow fast startup and lower memory usage
|
||||
- Try to reconnect to Tor on lost connection
|
||||
- Sidebar fade-in
|
||||
- Try to avoid incomplete data files overwrite
|
||||
- Faster database open
|
||||
- Display user file sizes in sidebar
|
||||
- Concurrent worker number depends on --connection_limit
|
||||
|
||||
### Changed
|
||||
- Close databases after 5 min idle time
|
||||
- Better site size calculation
|
||||
- Allow "-" character in domains
|
||||
- Always try to keep connections for sites
|
||||
- Remove merger permission from merged sites
|
||||
- Newsfeed scans only last 3 days to speed up database queries
|
||||
- Updated ZeroBundle-win to Python 2.7.12
|
||||
|
||||
### Fixed
|
||||
- Fix for important security problem, which allowed anyone to publish new content without valid certificate from ID provider. Thanks Kaffie for pointing it out!
|
||||
- Fix sidebar error when no certificate provider selected
|
||||
- Skip invalid files on database rebuilding
|
||||
- Fix random websocket connection error popups
|
||||
- Fix new siteCreate command
|
||||
- Fix site size calculation
|
||||
- Fix port open checking after computer wake up
|
||||
- Fix --size_limit parsing from command line
|
||||
|
||||
|
||||
## ZeroNet 0.4.0 (2016-08-11)
|
||||
### Added
|
||||
- Merger site plugin
|
||||
- Live source code reloading: Faster core development by allowing me to make changes in ZeroNet source code without restarting it.
|
||||
- New json table format for merger sites
|
||||
- Database rebuild from sidebar.
|
||||
- Allow to store custom data directly in json table: Much simpler and faster SQL queries.
|
||||
- User file archiving: Allows the site owner to archive inactive user's content into single file. (Reducing initial sync time/cpu/memory usage)
|
||||
- Also trigger onUpdated/update database on file delete.
|
||||
- Permission request from ZeroFrame API.
|
||||
- Allow to store extra data in content.json using fileWrite API command.
|
||||
- Faster optional files downloading
|
||||
- Use alternative sources (Gogs, Gitlab) to download updates
|
||||
- Track provided sites/connection and prefer to keep the ones with more sites to reduce connection number
|
||||
|
||||
### Changed
|
||||
- Keep at least 5 connection per site
|
||||
- Changed target connection for sites to 10 from 15
|
||||
- ZeroHello search function stability/speed improvements
|
||||
- Improvements for clients with slower HDD
|
||||
|
||||
### Fixed
|
||||
- Fix IE11 wrapper nonce errors
|
||||
- Fix sidebar on mobile devices
|
||||
- Fix site size calculation
|
||||
- Fix IE10 compatibility
|
||||
- Windows XP ZeroBundle compatibility (THX to people of China)
|
||||
|
||||
|
||||
## ZeroNet 0.3.7 (2016-05-27)
|
||||
### Changed
|
||||
- Patch command to reduce bandwidth usage by transfer only the changed lines
|
||||
- Other cpu/memory optimizations
|
||||
|
||||
|
||||
## ZeroNet 0.3.6 (2016-05-27)
|
||||
### Added
|
||||
- New ZeroHello
|
||||
- Newsfeed function
|
||||
|
||||
### Fixed
|
||||
- Security fixes
|
||||
|
||||
|
||||
## ZeroNet 0.3.5 (2016-02-02)
|
||||
### Added
|
||||
- Full Tor support with .onion hidden services
|
||||
- Bootstrap using ZeroNet protocol
|
||||
|
||||
### Fixed
|
||||
- Fix Gevent 1.0.2 compatibility
|
||||
|
||||
|
||||
## ZeroNet 0.3.4 (2015-12-28)
|
||||
### Added
|
||||
- AES, ECIES API function support
|
||||
- PushState and ReplaceState url manipulation support in API
|
||||
- Multiuser localstorage
|
|
@ -1,7 +1,7 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
@ -645,7 +645,7 @@ the "copyright" line and a pointer to where the full notice is found.
|
|||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
|
@ -664,11 +664,11 @@ might be different; for a GUI interface, you would use an "about box".
|
|||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
37
Dockerfile
37
Dockerfile
|
@ -1,28 +1,33 @@
|
|||
FROM ubuntu:14.04
|
||||
|
||||
MAINTAINER Felix Imobersteg <felix@whatwedo.ch>
|
||||
FROM alpine:3.15
|
||||
|
||||
#Base settings
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
ENV HOME /root
|
||||
|
||||
#Update package lists
|
||||
RUN apt-get update -y
|
||||
COPY requirements.txt /root/requirements.txt
|
||||
|
||||
#Install ZeroNet deps
|
||||
RUN apt-get install msgpack-python python-gevent python-pip python-dev -y
|
||||
RUN pip install msgpack-python --upgrade
|
||||
#Install ZeroNet
|
||||
RUN apk --update --no-cache --no-progress add python3 python3-dev py3-pip gcc g++ autoconf automake libtool libffi-dev musl-dev make tor openssl \
|
||||
&& pip3 install -r /root/requirements.txt \
|
||||
&& apk del python3-dev gcc g++ autoconf automake libtool libffi-dev musl-dev make \
|
||||
&& echo "ControlPort 9051" >> /etc/tor/torrc \
|
||||
&& echo "CookieAuthentication 1" >> /etc/tor/torrc
|
||||
|
||||
RUN python3 -V \
|
||||
&& python3 -m pip list \
|
||||
&& tor --version \
|
||||
&& openssl version
|
||||
|
||||
#Add Zeronet source
|
||||
ADD . /root
|
||||
COPY . /root
|
||||
VOLUME /root/data
|
||||
|
||||
#Slimming down Docker containers
|
||||
RUN apt-get clean -y
|
||||
RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
#Control if Tor proxy is started
|
||||
ENV ENABLE_TOR true
|
||||
|
||||
WORKDIR /root
|
||||
|
||||
#Set upstart command
|
||||
CMD cd /root && python zeronet.py --ui_ip 0.0.0.0
|
||||
CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26117
|
||||
|
||||
#Expose ports
|
||||
EXPOSE 43110
|
||||
EXPOSE 15441
|
||||
EXPOSE 43110 26117
|
||||
|
|
34
Dockerfile.arm64v8
Normal file
34
Dockerfile.arm64v8
Normal file
|
@ -0,0 +1,34 @@
|
|||
FROM alpine:3.12
|
||||
|
||||
#Base settings
|
||||
ENV HOME /root
|
||||
|
||||
COPY requirements.txt /root/requirements.txt
|
||||
|
||||
#Install ZeroNet
|
||||
RUN apk --update --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \
|
||||
&& pip3 install -r /root/requirements.txt \
|
||||
&& apk del python3-dev gcc libffi-dev musl-dev make \
|
||||
&& echo "ControlPort 9051" >> /etc/tor/torrc \
|
||||
&& echo "CookieAuthentication 1" >> /etc/tor/torrc
|
||||
|
||||
RUN python3 -V \
|
||||
&& python3 -m pip list \
|
||||
&& tor --version \
|
||||
&& openssl version
|
||||
|
||||
#Add Zeronet source
|
||||
COPY . /root
|
||||
VOLUME /root/data
|
||||
|
||||
#Control if Tor proxy is started
|
||||
ENV ENABLE_TOR false
|
||||
|
||||
WORKDIR /root
|
||||
|
||||
#Set upstart command
|
||||
CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552
|
||||
|
||||
#Expose ports
|
||||
EXPOSE 43110 26552
|
||||
|
367
LICENSE
367
LICENSE
|
@ -1,340 +1,27 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc., <http://fsf.org/>
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
{description}
|
||||
Copyright (C) {year} {fullname}
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
{signature of Ty Coon}, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, version 3.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
Additional Conditions :
|
||||
|
||||
Contributing to this repo
|
||||
This repo is governed by GPLv3, same is located at the root of the ZeroNet git repo,
|
||||
unless specified separately all code is governed by that license, contributions to this repo
|
||||
are divided into two key types, key contributions and non-key contributions, key contributions
|
||||
are which, directly affects the code performance, quality and features of software,
|
||||
non key contributions include things like translation datasets, image, graphic or video
|
||||
contributions that does not affect the main usability of software but improves the existing
|
||||
usability of certain thing or feature, these also include tests written with code, since their
|
||||
purpose is to check, whether something is working or not as intended. All the non-key contributions
|
||||
are governed by [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), unless specified
|
||||
above, a contribution is ruled by the type of contribution if there is a conflict between two
|
||||
contributing parties of repo in any case.
|
||||
|
|
133
README-ru.md
Normal file
133
README-ru.md
Normal file
|
@ -0,0 +1,133 @@
|
|||
# ZeroNet [](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [](https://hub.docker.com/r/canewsin/zeronet)
|
||||
|
||||
[简体中文](./README-zh-cn.md)
|
||||
[English](./README.md)
|
||||
|
||||
Децентрализованные веб-сайты, использующие криптографию Bitcoin и протокол BitTorrent — https://zeronet.dev ([Зеркало в ZeroNet](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/)). В отличие от Bitcoin, ZeroNet'у не требуется блокчейн для работы, однако он использует ту же криптографию, чтобы обеспечить сохранность и проверку данных.
|
||||
|
||||
## Зачем?
|
||||
|
||||
- Мы верим в открытую, свободную, и неподдающуюся цензуре сеть и связь.
|
||||
- Нет единой точки отказа: Сайт остаётся онлайн, пока его обслуживает хотя бы 1 пир.
|
||||
- Нет затрат на хостинг: Сайты обслуживаются посетителями.
|
||||
- Невозможно отключить: Он нигде, потому что он везде.
|
||||
- Скорость и возможность работать без Интернета: Вы сможете получить доступ к сайту, потому что его копия хранится на вашем компьютере и у ваших пиров.
|
||||
|
||||
## Особенности
|
||||
|
||||
- Обновление сайтов в реальном времени
|
||||
- Поддержка доменов `.bit` ([Namecoin](https://www.namecoin.org))
|
||||
- Легкая установка: просто распакуйте и запустите
|
||||
- Клонирование сайтов "в один клик"
|
||||
- Беспарольная [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
|
||||
авторизация: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек
|
||||
- Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы
|
||||
- Анонимность: Полная поддержка сети Tor, используя скрытые службы `.onion` вместо адресов IPv4
|
||||
- Зашифрованное TLS подключение
|
||||
- Автоматическое открытие UPnP–порта
|
||||
- Плагин для поддержки нескольких пользователей (openproxy)
|
||||
- Работа с любыми браузерами и операционными системами
|
||||
|
||||
## Текущие ограничения
|
||||
|
||||
- Файловые транзакции не сжаты
|
||||
- Нет приватных сайтов
|
||||
|
||||
## Как это работает?
|
||||
|
||||
- После запуска `zeronet.py` вы сможете посещать сайты в ZeroNet, используя адрес
|
||||
`http://127.0.0.1:43110/{zeronet_адрес}`
|
||||
(Например: `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`).
|
||||
- Когда вы посещаете новый сайт в ZeroNet, он пытается найти пиров с помощью протокола BitTorrent,
|
||||
чтобы скачать у них файлы сайта (HTML, CSS, JS и т.д.).
|
||||
- После посещения сайта вы тоже становитесь его пиром.
|
||||
- Каждый сайт содержит файл `content.json`, в котором хранятся SHA512-хеши всех остальных файлов
|
||||
и подпись, созданную с помощью закрытого ключа сайта.
|
||||
- Если владелец сайта (тот, кто владеет закрытым ключом для адреса сайта) изменяет сайт, он
|
||||
подписывает новый `content.json` и публикует его для пиров. После этого пиры проверяют целостность `content.json`
|
||||
(используя подпись), скачивают изменённые файлы и распространяют новый контент для других пиров.
|
||||
|
||||
[Презентация о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
|
||||
[Часто задаваемые вопросы »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/)
|
||||
[Документация разработчика ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
|
||||
|
||||
## Скриншоты
|
||||
|
||||

|
||||

|
||||
[Больше скриншотов в документации ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/)
|
||||
|
||||
## Как присоединиться?
|
||||
|
||||
### Windows
|
||||
|
||||
- Скачайте и распакуйте архив [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26МБ)
|
||||
- Запустите `ZeroNet.exe`
|
||||
|
||||
### macOS
|
||||
|
||||
- Скачайте и распакуйте архив [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14МБ)
|
||||
- Запустите `ZeroNet.app`
|
||||
|
||||
### Linux (64 бит)
|
||||
|
||||
- Скачайте и распакуйте архив [ZeroNet-linux.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip) (14МБ)
|
||||
- Запустите `./ZeroNet.sh`
|
||||
|
||||
> **Note**
|
||||
> Запустите таким образом: `./ZeroNet.sh --ui_ip '*' --ui_restrict ваш_ip_адрес`, чтобы разрешить удалённое подключение к веб–интерфейсу.
|
||||
|
||||
### Docker
|
||||
|
||||
Официальный образ находится здесь: https://hub.docker.com/r/canewsin/zeronet/
|
||||
|
||||
### Android (arm, arm64, x86)
|
||||
|
||||
- Для работы требуется Android как минимум версии 5.0 Lollipop
|
||||
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
|
||||
alt="Download from Google Play"
|
||||
height="80">](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile)
|
||||
- Скачать APK: https://github.com/canewsin/zeronet_mobile/releases
|
||||
|
||||
### Android (arm, arm64, x86) Облегчённый клиент только для просмотра (1МБ)
|
||||
|
||||
- Для работы требуется Android как минимум версии 4.1 Jelly Bean
|
||||
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
|
||||
alt="Download from Google Play"
|
||||
height="80">](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
|
||||
|
||||
### Установка из исходного кода
|
||||
|
||||
```sh
|
||||
wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip
|
||||
unzip ZeroNet-src.zip
|
||||
cd ZeroNet
|
||||
sudo apt-get update
|
||||
sudo apt-get install python3-pip
|
||||
sudo python3 -m pip install -r requirements.txt
|
||||
```
|
||||
- Запустите `python3 zeronet.py`
|
||||
|
||||
Откройте приветственную страницу ZeroHello в вашем браузере по ссылке http://127.0.0.1:43110/
|
||||
|
||||
## Как мне создать сайт в ZeroNet?
|
||||
|
||||
- Кликните на **⋮** > **"Create new, empty site"** в меню на сайте [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d).
|
||||
- Вы будете **перенаправлены** на совершенно новый сайт, который может быть изменён только вами!
|
||||
- Вы можете найти и изменить контент вашего сайта в каталоге **data/[адрес_вашего_сайта]**
|
||||
- После изменений откройте ваш сайт, переключите влево кнопку "0" в правом верхнем углу, затем нажмите кнопки **sign** и **publish** внизу
|
||||
|
||||
Следующие шаги: [Документация разработчика ZeroNet](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
|
||||
|
||||
## Поддержите проект
|
||||
|
||||
- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Рекомендуем)
|
||||
- LiberaPay: https://liberapay.com/PramUkesh
|
||||
- Paypal: https://paypal.me/PramUkesh
|
||||
- Другие способы: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
|
||||
|
||||
#### Спасибо!
|
||||
|
||||
- Здесь вы можете получить больше информации, помощь, прочитать список изменений и исследовать ZeroNet сайты: https://www.reddit.com/r/zeronetx/
|
||||
- Общение происходит на канале [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или в [Gitter](https://gitter.im/canewsin/ZeroNet)
|
||||
- Электронная почта: canews.in@gmail.com
|
132
README-zh-cn.md
Normal file
132
README-zh-cn.md
Normal file
|
@ -0,0 +1,132 @@
|
|||
# ZeroNet [](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [](https://hub.docker.com/r/canewsin/zeronet)
|
||||
|
||||
[English](./README.md)
|
||||
|
||||
使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.dev
|
||||
|
||||
|
||||
## 为什么?
|
||||
|
||||
* 我们相信开放,自由,无审查的网络和通讯
|
||||
* 不会受单点故障影响:只要有在线的节点,站点就会保持在线
|
||||
* 无托管费用:站点由访问者托管
|
||||
* 无法关闭:因为节点无处不在
|
||||
* 快速并可离线运行:即使没有互联网连接也可以使用
|
||||
|
||||
|
||||
## 功能
|
||||
* 实时站点更新
|
||||
* 支持 Namecoin 的 .bit 域名
|
||||
* 安装方便:只需解压并运行
|
||||
* 一键克隆存在的站点
|
||||
* 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
|
||||
的认证:您的账户被与比特币钱包相同的加密方法保护
|
||||
* 内建 SQL 服务器和 P2P 数据同步:让开发更简单并提升加载速度
|
||||
* 匿名性:完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过 IPv4 地址连接
|
||||
* TLS 加密连接
|
||||
* 自动打开 uPnP 端口
|
||||
* 多用户(openproxy)支持的插件
|
||||
* 适用于任何浏览器 / 操作系统
|
||||
|
||||
|
||||
## 原理
|
||||
|
||||
* 在运行 `zeronet.py` 后,您将可以通过
|
||||
`http://127.0.0.1:43110/{zeronet_address}`(例如:
|
||||
`http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`)访问 zeronet 中的站点
|
||||
* 在您浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件(html,css,js...)
|
||||
* 您将会储存每一个浏览过的站点
|
||||
* 每个站点都包含一个名为 `content.json` 的文件,它储存了其他所有文件的 sha512 散列值以及一个通过站点私钥生成的签名
|
||||
* 如果站点的所有者(拥有站点地址的私钥)修改了站点,并且他 / 她签名了新的 `content.json` 然后推送至其他节点,
|
||||
那么这些节点将会在使用签名验证 `content.json` 的真实性后,下载修改后的文件并将新内容推送至另外的节点
|
||||
|
||||
#### [关于 ZeroNet 加密,站点更新,多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
|
||||
#### [常见问题 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/)
|
||||
|
||||
#### [ZeroNet 开发者文档 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
|
||||
|
||||
|
||||
## 屏幕截图
|
||||
|
||||

|
||||

|
||||
|
||||
#### [ZeroNet 文档中的更多屏幕截图 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/)
|
||||
|
||||
|
||||
## 如何加入
|
||||
|
||||
### Windows
|
||||
|
||||
- 下载 [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB)
|
||||
- 在任意位置解压缩
|
||||
- 运行 `ZeroNet.exe`
|
||||
|
||||
### macOS
|
||||
|
||||
- 下载 [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB)
|
||||
- 在任意位置解压缩
|
||||
- 运行 `ZeroNet.app`
|
||||
|
||||
### Linux (x86-64bit)
|
||||
|
||||
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip`
|
||||
- `unzip ZeroNet-linux.zip`
|
||||
- `cd ZeroNet-linux`
|
||||
- 使用以下命令启动 `./ZeroNet.sh`
|
||||
- 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面
|
||||
|
||||
__提示:__ 若要允许在 Web 界面上的远程连接,使用以下命令启动 `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address`
|
||||
|
||||
### 从源代码安装
|
||||
|
||||
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
|
||||
- `unzip ZeroNet-src.zip`
|
||||
- `cd ZeroNet`
|
||||
- `sudo apt-get update`
|
||||
- `sudo apt-get install python3-pip`
|
||||
- `sudo python3 -m pip install -r requirements.txt`
|
||||
- 使用以下命令启动 `python3 zeronet.py`
|
||||
- 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面
|
||||
|
||||
### Android (arm, arm64, x86)
|
||||
- minimum Android version supported 21 (Android 5.0 Lollipop)
|
||||
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
|
||||
alt="Download from Google Play"
|
||||
height="80">](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile)
|
||||
- APK download: https://github.com/canewsin/zeronet_mobile/releases
|
||||
|
||||
### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB)
|
||||
- minimum Android version supported 16 (JellyBean)
|
||||
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
|
||||
alt="Download from Google Play"
|
||||
height="80">](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
|
||||
|
||||
## 现有限制
|
||||
|
||||
* 传输文件时没有压缩
|
||||
* 不支持私有站点
|
||||
|
||||
|
||||
## 如何创建一个 ZeroNet 站点?
|
||||
|
||||
* 点击 [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) 站点的 **⋮** > **「新建空站点」** 菜单项
|
||||
* 您将被**重定向**到一个全新的站点,该站点只能由您修改
|
||||
* 您可以在 **data/[您的站点地址]** 目录中找到并修改网站的内容
|
||||
* 修改后打开您的网站,将右上角的「0」按钮拖到左侧,然后点击底部的**签名**并**发布**按钮
|
||||
|
||||
接下来的步骤:[ZeroNet 开发者文档](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
|
||||
|
||||
## 帮助这个项目
|
||||
- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
|
||||
- LiberaPay: https://liberapay.com/PramUkesh
|
||||
- Paypal: https://paypal.me/PramUkesh
|
||||
- Others: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
|
||||
|
||||
|
||||
#### 感谢您!
|
||||
|
||||
* 更多信息,帮助,变更记录和 zeronet 站点:https://www.reddit.com/r/zeronetx/
|
||||
* 前往 [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 或 [gitter](https://gitter.im/canewsin/ZeroNet) 和我们聊天
|
||||
* [这里](https://gitter.im/canewsin/ZeroNet)是一个 gitter 上的中文聊天室
|
||||
* Email: canews.in@gmail.com
|
200
README.md
200
README.md
|
@ -1,16 +1,16 @@
|
|||
# ZeroNet [](https://travis-ci.org/HelloZeroNet/ZeroNet) [](https://zeronet.readthedocs.org/en/latest/faq/) [](https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/)
|
||||
|
||||
Decentralized websites using Bitcoin crypto and the BitTorrent network - http://zeronet.io
|
||||
# ZeroNet [](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [](https://hub.docker.com/r/canewsin/zeronet)
|
||||
<!--TODO: Update Onion Site -->
|
||||
Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev / [ZeroNet Site](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/), Unlike Bitcoin, ZeroNet Doesn't need a blockchain to run, But uses cryptography used by BTC, to ensure data integrity and validation.
|
||||
|
||||
|
||||
## Why?
|
||||
|
||||
* We believe in open, free, and uncensored network and communication.
|
||||
* No single point of failure: Site remains online so long as at least 1 peer
|
||||
* No single point of failure: Site remains online so long as at least 1 peer is
|
||||
serving it.
|
||||
* No hosting costs: Sites are served by visitors.
|
||||
* Impossible to shut down: It's nowhere because it's everywhere.
|
||||
* Fast and works offline: You can access the site even if your internet is
|
||||
* Fast and works offline: You can access the site even if Internet is
|
||||
unavailable.
|
||||
|
||||
|
||||
|
@ -20,11 +20,11 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - http://
|
|||
* Easy to setup: unpack & run
|
||||
* Clone websites in one click
|
||||
* Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
|
||||
based authorization: Your account is protected by same cryptography as your Bitcoin wallet
|
||||
based authorization: Your account is protected by the same cryptography as your Bitcoin wallet
|
||||
* Built-in SQL server with P2P data synchronization: Allows easier site development and faster page load times
|
||||
* Tor network support
|
||||
* Anonymity: Full Tor network support with .onion hidden services instead of IPv4 addresses
|
||||
* TLS encrypted connections
|
||||
* Automatic, uPnP port opening
|
||||
* Automatic uPnP port opening
|
||||
* Plugin for multiuser (openproxy) support
|
||||
* Works with any browser/OS
|
||||
|
||||
|
@ -33,154 +33,124 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - http://
|
|||
|
||||
* After starting `zeronet.py` you will be able to visit zeronet sites using
|
||||
`http://127.0.0.1:43110/{zeronet_address}` (eg.
|
||||
`http://127.0.0.1:43110/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr`).
|
||||
`http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`).
|
||||
* When you visit a new zeronet site, it tries to find peers using the BitTorrent
|
||||
network so it can download the site files (html, css, js...) from them.
|
||||
* Each visited site becomes also served by you.
|
||||
* Every site contains a `content.json` which holds all other files in a sha512 hash
|
||||
and a signature generated using site's private key.
|
||||
* Each visited site is also served by you.
|
||||
* Every site contains a `content.json` file which holds all other files in a sha512 hash
|
||||
and a signature generated using the site's private key.
|
||||
* If the site owner (who has the private key for the site address) modifies the
|
||||
site, then he/she signs the new `content.json` and publishes it to the peers.
|
||||
After the peers have verified the `content.json` integrity (using the
|
||||
site and signs the new `content.json` and publishes it to the peers.
|
||||
Afterwards, the peers verify the `content.json` integrity (using the
|
||||
signature), they download the modified files and publish the new content to
|
||||
other peers.
|
||||
|
||||
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000)
|
||||
#### [Frequently asked questions »](http://zeronet.readthedocs.org/en/latest/faq/)
|
||||
#### [Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/)
|
||||
|
||||
#### [ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
|
||||
|
||||
|
||||
## Screenshots
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
#### [More screenshots in ZeroNet docs »](http://zeronet.readthedocs.org/en/latest/using_zeronet/sample_sites/)
|
||||
#### [More screenshots in ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/)
|
||||
|
||||
|
||||
## How to join?
|
||||
## How to join
|
||||
|
||||
### Windows
|
||||
|
||||
* [Download ZeroBundle package](https://github.com/HelloZeroNet/ZeroBundle/releases/download/0.1.1/ZeroBundle-v0.1.1.zip) that includes Python 2.7.9 and all required libraries
|
||||
* Unpack to any directory
|
||||
* Run `zeronet.cmd`
|
||||
- Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB)
|
||||
- Unpack anywhere
|
||||
- Run `ZeroNet.exe`
|
||||
|
||||
### macOS
|
||||
|
||||
It downloads the latest version of ZeroNet then starts it automatically.
|
||||
- Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB)
|
||||
- Unpack anywhere
|
||||
- Run `ZeroNet.app`
|
||||
|
||||
### Linux (x86-64bit)
|
||||
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip`
|
||||
- `unzip ZeroNet-linux.zip`
|
||||
- `cd ZeroNet-linux`
|
||||
- Start with: `./ZeroNet.sh`
|
||||
- Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/
|
||||
|
||||
__Tip:__ Start with `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` to allow remote connections on the web interface.
|
||||
|
||||
### Android (arm, arm64, x86)
|
||||
- minimum Android version supported 21 (Android 5.0 Lollipop)
|
||||
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
|
||||
alt="Download from Google Play"
|
||||
height="80">](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile)
|
||||
- APK download: https://github.com/canewsin/zeronet_mobile/releases
|
||||
|
||||
### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB)
|
||||
- minimum Android version supported 16 (JellyBean)
|
||||
- [<img src="https://play.google.com/intl/en_us/badges/images/generic/en_badge_web_generic.png"
|
||||
alt="Download from Google Play"
|
||||
height="80">](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
|
||||
|
||||
|
||||
#### Alternative method for Windows by installing Python
|
||||
#### Docker
|
||||
There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/
|
||||
|
||||
* [Install Python 2.7](https://www.python.org/ftp/python/2.7.9/python-2.7.9.msi)
|
||||
* [Install Python Greenlet](http://zeronet.io/files/windows/greenlet-0.4.5.win32-py2.7.exe)
|
||||
* [Install Python Gevent](http://zeronet.io/files/windows/gevent-1.0.1.win32-py2.7.exe)
|
||||
* [Install Python MsgPack](http://zeronet.io/files/windows/msgpack-python-0.4.2.win32-py2.7.exe)
|
||||
* [Download and extract ZeroNet](https://codeload.github.com/HelloZeroNet/ZeroNet/zip/master) to any directory
|
||||
* Run `start.py`
|
||||
### Online Proxies
|
||||
Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one.
|
||||
|
||||
### Linux
|
||||
#### Official ZNX Proxy :
|
||||
|
||||
#### Debian
|
||||
https://proxy.zeronet.dev/
|
||||
|
||||
* `sudo apt-get update`
|
||||
* `sudo apt-get install msgpack-python python-gevent`
|
||||
* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz`
|
||||
* `tar xvpfz master.tar.gz`
|
||||
* `cd ZeroNet-master`
|
||||
* Start with `python zeronet.py`
|
||||
* Open http://127.0.0.1:43110/ in your browser and enjoy! :)
|
||||
https://zeronet.dev/
|
||||
|
||||
#### Other Linux or without root access
|
||||
* Check your python version using `python --version` if the returned version is not `Python 2.7.X` then try `python2` or `python2.7` command and use it from now
|
||||
* `wget https://bootstrap.pypa.io/get-pip.py`
|
||||
* `python get-pip.py --user gevent msgpack-python`
|
||||
* Start with `python zeronet.py`
|
||||
#### From Community
|
||||
|
||||
### Mac
|
||||
https://0net-preview.com/
|
||||
|
||||
* Install [Homebrew](http://brew.sh/)
|
||||
* `brew install python`
|
||||
* `pip install gevent msgpack-python`
|
||||
* [Download](https://github.com/HelloZeroNet/ZeroNet/archive/master.zip), Unpack, run `python zeronet.py`
|
||||
https://portal.ngnoid.tv/
|
||||
|
||||
### Vagrant
|
||||
https://zeronet.ipfsscan.io/
|
||||
|
||||
* `vagrant up`
|
||||
* Access VM with `vagrant ssh`
|
||||
* `cd /vagrant`
|
||||
* Run `python zeronet.py --ui_ip 0.0.0.0`
|
||||
* Open http://127.0.0.1:43110/ in your browser
|
||||
|
||||
### Docker
|
||||
* `docker run -p 15441:15441 -p 43110:43110 nofish/zeronet`
|
||||
* Open http://127.0.0.1:43110/ in your browser
|
||||
### Install from source
|
||||
|
||||
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
|
||||
- `unzip ZeroNet-src.zip`
|
||||
- `cd ZeroNet`
|
||||
- `sudo apt-get update`
|
||||
- `sudo apt-get install python3-pip`
|
||||
- `sudo python3 -m pip install -r requirements.txt`
|
||||
- Start with: `python3 zeronet.py`
|
||||
- Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/
|
||||
|
||||
## Current limitations
|
||||
|
||||
* No torrent-like, file splitting for big file support
|
||||
* No more anonymous than Bittorrent
|
||||
* File transactions are not compressed ~~or encrypted yet~~
|
||||
* File transactions are not compressed
|
||||
* No private sites
|
||||
* ~~You must have an open port to publish new changes~~
|
||||
* ~~Timeout problems on slow connections~~
|
||||
|
||||
|
||||
## How can I create a ZeroNet site?
|
||||
|
||||
Shut down zeronet if you are running it already
|
||||
* Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d).
|
||||
* You will be **redirected** to a completely new site that is only modifiable by you!
|
||||
* You can find and modify your site's content in **data/[yoursiteaddress]** directory
|
||||
* After the modifications open your site, drag the topright "0" button to left, then press **sign** and **publish** buttons on the bottom
|
||||
|
||||
```bash
|
||||
$ zeronet.py siteCreate
|
||||
...
|
||||
- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq
|
||||
- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
|
||||
...
|
||||
- Site created!
|
||||
$ zeronet.py
|
||||
...
|
||||
```
|
||||
Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
|
||||
|
||||
Congratulations, you're finished! Now anyone can access your site using
|
||||
`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2`
|
||||
|
||||
Next steps: [ZeroNet Developer Documentation](http://zeronet.readthedocs.org/en/latest/site_development/getting_started/)
|
||||
|
||||
|
||||
## How can I modify a ZeroNet site?
|
||||
|
||||
* Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory.
|
||||
After you're finished:
|
||||
|
||||
```bash
|
||||
$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
|
||||
- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2...
|
||||
Private key (input hidden):
|
||||
```
|
||||
|
||||
* Enter the private key you got when you created the site, then:
|
||||
|
||||
```bash
|
||||
$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
|
||||
...
|
||||
Site:13DNDk..bhC2 Publishing to 3/10 peers...
|
||||
Site:13DNDk..bhC2 Successfuly published to 3 peers
|
||||
- Serving files....
|
||||
```
|
||||
|
||||
* That's it! You've successfully signed and published your modifications.
|
||||
|
||||
|
||||
## If you want to help keep this project alive
|
||||
|
||||
- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX
|
||||
- Paypal: https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/
|
||||
|
||||
### Sponsors
|
||||
|
||||
* Better OSX/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com)
|
||||
## Help keep this project alive
|
||||
- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
|
||||
- LiberaPay: https://liberapay.com/PramUkesh
|
||||
- Paypal: https://paypal.me/PramUkesh
|
||||
- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
|
||||
|
||||
#### Thank you!
|
||||
|
||||
* More info, help, changelog, zeronet sites: http://www.reddit.com/r/zeronet/
|
||||
* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet)
|
||||
* Email: hello@noloop.me
|
||||
* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronetx/
|
||||
* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/canewsin/ZeroNet)
|
||||
* Email: canews.in@gmail.com
|
||||
|
|
2
Vagrantfile
vendored
2
Vagrantfile
vendored
|
@ -40,6 +40,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
|||
config.vm.provision "shell",
|
||||
inline: "sudo apt-get install msgpack-python python-gevent python-pip python-dev -y"
|
||||
config.vm.provision "shell",
|
||||
inline: "sudo pip install msgpack-python --upgrade"
|
||||
inline: "sudo pip install msgpack --upgrade"
|
||||
|
||||
end
|
||||
|
|
1
plugins
Submodule
1
plugins
Submodule
|
@ -0,0 +1 @@
|
|||
Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb
|
|
@ -1,53 +0,0 @@
|
|||
from lib.pybitcointools import bitcoin as btctools
|
||||
import hashlib
|
||||
|
||||
ecc_cache = {}
|
||||
|
||||
|
||||
def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'):
|
||||
from lib import pyelliptic
|
||||
curve, pubkey_x, pubkey_y, i = pyelliptic.ECC._decode_pubkey(pubkey)
|
||||
if ephemcurve is None:
|
||||
ephemcurve = curve
|
||||
ephem = pyelliptic.ECC(curve=ephemcurve)
|
||||
key = hashlib.sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest()
|
||||
key_e, key_m = key[:32], key[32:]
|
||||
pubkey = ephem.get_pubkey()
|
||||
iv = pyelliptic.OpenSSL.rand(pyelliptic.OpenSSL.get_cipher(ciphername).get_blocksize())
|
||||
ctx = pyelliptic.Cipher(key_e, iv, 1, ciphername)
|
||||
ciphertext = iv + pubkey + ctx.ciphering(data)
|
||||
mac = pyelliptic.hmac_sha256(key_m, ciphertext)
|
||||
return key_e, ciphertext + mac
|
||||
|
||||
|
||||
def split(encrypted):
|
||||
iv = encrypted[0:16]
|
||||
ciphertext = encrypted[16+70:-32]
|
||||
|
||||
return iv, ciphertext
|
||||
|
||||
|
||||
def getEcc(privatekey=None):
|
||||
from lib import pyelliptic
|
||||
global eccs
|
||||
if privatekey not in ecc_cache:
|
||||
if privatekey:
|
||||
publickey_bin = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin")
|
||||
publickey_openssl = toOpensslPublickey(publickey_bin)
|
||||
privatekey_openssl = toOpensslPrivatekey(privatekey)
|
||||
ecc_cache[privatekey] = pyelliptic.ECC(curve='secp256k1', privkey=privatekey_openssl, pubkey=publickey_openssl)
|
||||
else:
|
||||
ecc_cache[None] = pyelliptic.ECC()
|
||||
return ecc_cache[privatekey]
|
||||
|
||||
|
||||
def toOpensslPrivatekey(privatekey):
|
||||
privatekey_bin = btctools.encode_privkey(privatekey, "bin")
|
||||
return '\x02\xca\x00\x20' + privatekey_bin
|
||||
|
||||
|
||||
def toOpensslPublickey(publickey):
|
||||
publickey_bin = btctools.encode_pubkey(publickey, "bin")
|
||||
publickey_bin = publickey_bin[1:]
|
||||
publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
|
||||
return publickey_openssl
|
|
@ -1,149 +0,0 @@
|
|||
import base64
|
||||
import os
|
||||
|
||||
from Plugin import PluginManager
|
||||
from Crypt import CryptBitcoin
|
||||
from lib.pybitcointools import bitcoin as btctools
|
||||
|
||||
import CryptMessage
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiWebsocket")
|
||||
class UiWebsocketPlugin(object):
|
||||
def encrypt(self, text, publickey):
|
||||
encrypted = CryptMessage.encrypt(text, CryptMessage.toOpensslPublickey(publickey))
|
||||
return encrypted
|
||||
|
||||
def decrypt(self, encrypted, privatekey):
|
||||
back = CryptMessage.getEcc(privatekey).decrypt(encrypted)
|
||||
return back.decode("utf8")
|
||||
|
||||
# - Actions -
|
||||
|
||||
# Returns user's public key unique to site
|
||||
# Return: Public key
|
||||
def actionUserPublickey(self, to, index=0):
|
||||
publickey = self.user.getEncryptPublickey(self.site.address, index)
|
||||
self.response(to, publickey)
|
||||
|
||||
# Encrypt a text using the publickey or user's sites unique publickey
|
||||
# Return: Encrypted text using base64 encoding
|
||||
def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False):
|
||||
if type(publickey) is int: # Encrypt using user's publickey
|
||||
publickey = self.user.getEncryptPublickey(self.site.address, publickey)
|
||||
aes_key, encrypted = self.encrypt(text.encode("utf8"), publickey.decode("base64"))
|
||||
if return_aes_key:
|
||||
self.response(to, [base64.b64encode(encrypted), base64.b64encode(aes_key)])
|
||||
else:
|
||||
self.response(to, base64.b64encode(encrypted))
|
||||
|
||||
# Decrypt a text using privatekey or the user's site unique private key
|
||||
# Return: Decrypted text or list of decrypted texts
|
||||
def actionEciesDecrypt(self, to, param, privatekey=0):
|
||||
if type(privatekey) is int: # Decrypt using user's privatekey
|
||||
privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey)
|
||||
|
||||
if type(param) == list:
|
||||
encrypted_texts = param
|
||||
else:
|
||||
encrypted_texts = [param]
|
||||
|
||||
texts = [] # Decoded texts
|
||||
for encrypted_text in encrypted_texts:
|
||||
try:
|
||||
text = self.decrypt(encrypted_text.decode("base64"), privatekey)
|
||||
texts.append(text)
|
||||
except Exception, err:
|
||||
texts.append(None)
|
||||
|
||||
if type(param) == list:
|
||||
self.response(to, texts)
|
||||
else:
|
||||
self.response(to, texts[0])
|
||||
|
||||
# Encrypt a text using AES
|
||||
# Return: Iv, AES key, Encrypted text
|
||||
def actionAesEncrypt(self, to, text, key=None, iv=None):
|
||||
from lib import pyelliptic
|
||||
|
||||
if key:
|
||||
key = key.decode("base64")
|
||||
else:
|
||||
key = os.urandom(32)
|
||||
|
||||
if iv: # Generate new AES key if not definied
|
||||
iv = iv.decode("base64")
|
||||
else:
|
||||
iv = pyelliptic.Cipher.gen_IV('aes-256-cbc')
|
||||
|
||||
if text:
|
||||
encrypted = pyelliptic.Cipher(key, iv, 1, ciphername='aes-256-cbc').ciphering(text.encode("utf8"))
|
||||
else:
|
||||
encrypted = ""
|
||||
|
||||
self.response(to, [base64.b64encode(key), base64.b64encode(iv), base64.b64encode(encrypted)])
|
||||
|
||||
# Decrypt a text using AES
|
||||
# Return: Decrypted text
|
||||
def actionAesDecrypt(self, to, *args):
|
||||
from lib import pyelliptic
|
||||
|
||||
if len(args) == 3: # Single decrypt
|
||||
encrypted_texts = [(args[0], args[1])]
|
||||
keys = [args[2]]
|
||||
else: # Batch decrypt
|
||||
encrypted_texts, keys = args
|
||||
|
||||
texts = [] # Decoded texts
|
||||
for iv, encrypted_text in encrypted_texts:
|
||||
encrypted_text = encrypted_text.decode("base64")
|
||||
iv = iv.decode("base64")
|
||||
text = None
|
||||
for key in keys:
|
||||
ctx = pyelliptic.Cipher(key.decode("base64"), iv, 0, ciphername='aes-256-cbc')
|
||||
try:
|
||||
decrypted = ctx.ciphering(encrypted_text)
|
||||
if decrypted and decrypted.decode("utf8"): # Valid text decoded
|
||||
text = decrypted
|
||||
except Exception, err:
|
||||
pass
|
||||
texts.append(text)
|
||||
|
||||
if len(args) == 3:
|
||||
self.response(to, texts[0])
|
||||
else:
|
||||
self.response(to, texts)
|
||||
|
||||
|
||||
@PluginManager.registerTo("User")
|
||||
class UserPlugin(object):
|
||||
def getEncryptPrivatekey(self, address, param_index=0):
|
||||
assert param_index >= 0 and param_index <= 1000
|
||||
site_data = self.getSiteData(address)
|
||||
|
||||
if site_data.get("cert"): # Different privatekey for different cert provider
|
||||
index = param_index + self.getAddressAuthIndex(site_data["cert"])
|
||||
else:
|
||||
index = param_index
|
||||
|
||||
if "encrypt_privatekey_%s" % index not in site_data:
|
||||
address_index = self.getAddressAuthIndex(address)
|
||||
crypt_index = address_index + 1000 + index
|
||||
site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index)
|
||||
self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index))
|
||||
return site_data["encrypt_privatekey_%s" % index]
|
||||
|
||||
def getEncryptPublickey(self, address, param_index=0):
|
||||
assert param_index >= 0 and param_index <= 1000
|
||||
site_data = self.getSiteData(address)
|
||||
|
||||
if site_data.get("cert"): # Different privatekey for different cert provider
|
||||
index = param_index + self.getAddressAuthIndex(site_data["cert"])
|
||||
else:
|
||||
index = param_index
|
||||
|
||||
if "encrypt_publickey_%s" % index not in site_data:
|
||||
privatekey = self.getEncryptPrivatekey(address, param_index)
|
||||
publickey = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin_compressed")
|
||||
site_data["encrypt_publickey_%s" % index] = base64.b64encode(publickey)
|
||||
return site_data["encrypt_publickey_%s" % index]
|
|
@ -1,106 +0,0 @@
|
|||
import pytest
|
||||
from CryptMessage import CryptMessage
|
||||
|
||||
@pytest.mark.usefixtures("resetSettings")
|
||||
class TestCrypt:
|
||||
def testPublickey(self, ui_websocket):
|
||||
pub = ui_websocket.testAction("UserPublickey", 0)
|
||||
assert len(pub) == 44 # Compressed, b64 encoded publickey
|
||||
|
||||
# Different pubkey for specificed index
|
||||
assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0)
|
||||
|
||||
# Same publickey for same index
|
||||
assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2)
|
||||
|
||||
# Different publickey for different cert
|
||||
pub1 = ui_websocket.testAction("UserPublickey", 0)
|
||||
site_data = ui_websocket.user.getSiteData(ui_websocket.site.address)
|
||||
site_data["cert"] = "zeroid.bit"
|
||||
pub2 = ui_websocket.testAction("UserPublickey", 0)
|
||||
assert pub1 != pub2
|
||||
|
||||
|
||||
|
||||
def testEcies(self, ui_websocket):
|
||||
ui_websocket.actionUserPublickey(0, 0)
|
||||
pub = ui_websocket.ws.result
|
||||
|
||||
ui_websocket.actionEciesEncrypt(0, "hello", pub)
|
||||
encrypted = ui_websocket.ws.result
|
||||
assert len(encrypted) == 180
|
||||
|
||||
# Don't allow decrypt using other privatekey index
|
||||
ui_websocket.actionEciesDecrypt(0, encrypted, 123)
|
||||
decrypted = ui_websocket.ws.result
|
||||
assert decrypted != "hello"
|
||||
|
||||
# Decrypt using correct privatekey
|
||||
ui_websocket.actionEciesDecrypt(0, encrypted)
|
||||
decrypted = ui_websocket.ws.result
|
||||
assert decrypted == "hello"
|
||||
|
||||
# Decrypt batch
|
||||
ui_websocket.actionEciesDecrypt(0, [encrypted, "baad", encrypted])
|
||||
decrypted = ui_websocket.ws.result
|
||||
assert decrypted == ["hello", None, "hello"]
|
||||
|
||||
|
||||
def testEciesUtf8(self, ui_websocket):
|
||||
# Utf8 test
|
||||
utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9p'
|
||||
ui_websocket.actionEciesEncrypt(0, utf8_text)
|
||||
encrypted = ui_websocket.ws.result
|
||||
|
||||
ui_websocket.actionEciesDecrypt(0, encrypted)
|
||||
assert ui_websocket.ws.result == utf8_text
|
||||
|
||||
|
||||
def testEciesAes(self, ui_websocket):
|
||||
ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True)
|
||||
ecies_encrypted, aes_key = ui_websocket.ws.result
|
||||
|
||||
# Decrypt using Ecies
|
||||
ui_websocket.actionEciesDecrypt(0, ecies_encrypted)
|
||||
assert ui_websocket.ws.result == "hello"
|
||||
|
||||
# Decrypt using AES
|
||||
aes_iv, aes_encrypted = CryptMessage.split(ecies_encrypted.decode("base64"))
|
||||
|
||||
ui_websocket.actionAesDecrypt(0, aes_iv.encode("base64"), aes_encrypted.encode("base64"), aes_key)
|
||||
assert ui_websocket.ws.result == "hello"
|
||||
|
||||
|
||||
def testAes(self, ui_websocket):
|
||||
ui_websocket.actionAesEncrypt(0, "hello")
|
||||
key, iv, encrypted = ui_websocket.ws.result
|
||||
|
||||
assert len(key) == 44
|
||||
assert len(iv) == 24
|
||||
assert len(encrypted) == 24
|
||||
|
||||
# Single decrypt
|
||||
ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
|
||||
assert ui_websocket.ws.result == "hello"
|
||||
|
||||
# Batch decrypt
|
||||
ui_websocket.actionAesEncrypt(0, "hello")
|
||||
key2, iv2, encrypted2 = ui_websocket.ws.result
|
||||
|
||||
assert [key, iv, encrypted] != [key2, iv2, encrypted2]
|
||||
|
||||
# 2 correct key
|
||||
ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key])
|
||||
assert ui_websocket.ws.result == ["hello", "hello", None, None]
|
||||
|
||||
# 3 key
|
||||
ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2])
|
||||
assert ui_websocket.ws.result == ["hello", "hello", None, "hello"]
|
||||
|
||||
def testAesUtf8(self, ui_websocket):
|
||||
utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'
|
||||
ui_websocket.actionAesEncrypt(0, utf8_text)
|
||||
key, iv, encrypted = ui_websocket.ws.result
|
||||
|
||||
ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
|
||||
assert ui_websocket.ws.result == utf8_text
|
|
@ -1 +0,0 @@
|
|||
from src.Test.conftest import *
|
|
@ -1,5 +0,0 @@
|
|||
[pytest]
|
||||
python_files = Test*.py
|
||||
addopts = -rsxX -v --durations=6
|
||||
markers =
|
||||
webtest: mark a test as a webtest.
|
|
@ -1 +0,0 @@
|
|||
import CryptMessagePlugin
|
|
@ -1,468 +0,0 @@
|
|||
import re
|
||||
import os
|
||||
import cgi
|
||||
import sys
|
||||
import math
|
||||
import time
|
||||
try:
|
||||
import cStringIO as StringIO
|
||||
except:
|
||||
import StringIO
|
||||
|
||||
|
||||
from Config import config
|
||||
from Plugin import PluginManager
|
||||
from Debug import Debug
|
||||
|
||||
plugin_dir = "plugins/Sidebar"
|
||||
media_dir = plugin_dir + "/media"
|
||||
sys.path.append(plugin_dir) # To able to load geoip lib
|
||||
|
||||
loc_cache = {}
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
|
||||
class UiRequestPlugin(object):
|
||||
# Inject our resources to end of original file streams
|
||||
def actionUiMedia(self, path):
|
||||
if path == "/uimedia/all.js" or path == "/uimedia/all.css":
|
||||
# First yield the original file and header
|
||||
body_generator = super(UiRequestPlugin, self).actionUiMedia(path)
|
||||
for part in body_generator:
|
||||
yield part
|
||||
|
||||
# Append our media file to the end
|
||||
ext = re.match(".*(js|css)$", path).group(1)
|
||||
plugin_media_file = "%s/all.%s" % (media_dir, ext)
|
||||
if config.debug:
|
||||
# If debugging merge *.css to all.css and *.js to all.js
|
||||
from Debug import DebugMedia
|
||||
DebugMedia.merge(plugin_media_file)
|
||||
for part in self.actionFile(plugin_media_file, send_header=False):
|
||||
yield part
|
||||
elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files
|
||||
file_name = re.match(".*/(.*)", path).group(1)
|
||||
plugin_media_file = "%s-globe/%s" % (media_dir, file_name)
|
||||
if config.debug and path.endswith("all.js"):
|
||||
# If debugging merge *.css to all.css and *.js to all.js
|
||||
from Debug import DebugMedia
|
||||
DebugMedia.merge(plugin_media_file)
|
||||
for part in self.actionFile(plugin_media_file):
|
||||
yield part
|
||||
else:
|
||||
for part in super(UiRequestPlugin, self).actionUiMedia(path):
|
||||
yield part
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiWebsocket")
|
||||
class UiWebsocketPlugin(object):
|
||||
|
||||
def sidebarRenderPeerStats(self, body, site):
|
||||
connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected])
|
||||
connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")])
|
||||
peers_total = len(site.peers)
|
||||
if peers_total:
|
||||
percent_connected = float(connected) / peers_total
|
||||
percent_connectable = float(connectable) / peers_total
|
||||
else:
|
||||
percent_connectable = percent_connected = 0
|
||||
body.append("""
|
||||
<li>
|
||||
<label>Peers</label>
|
||||
<ul class='graph'>
|
||||
<li style='width: 100%' class='total back-black' title="Total peers"></li>
|
||||
<li style='width: {percent_connectable:.0%}' class='connectable back-blue' title='Connectable peers'></li>
|
||||
<li style='width: {percent_connected:.0%}' class='connected back-green' title='Connected peers'></li>
|
||||
</ul>
|
||||
<ul class='graph-legend'>
|
||||
<li class='color-green'><span>connected:</span><b>{connected}</b></li>
|
||||
<li class='color-blue'><span>Connectable:</span><b>{connectable}</b></li>
|
||||
<li class='color-black'><span>Total:</span><b>{peers_total}</b></li>
|
||||
</ul>
|
||||
</li>
|
||||
""".format(**locals()))
|
||||
|
||||
def sidebarRenderTransferStats(self, body, site):
|
||||
recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024
|
||||
sent = float(site.settings.get("bytes_sent", 0)) / 1024 / 1024
|
||||
transfer_total = recv + sent
|
||||
if transfer_total:
|
||||
percent_recv = recv / transfer_total
|
||||
percent_sent = sent / transfer_total
|
||||
else:
|
||||
percent_recv = 0.5
|
||||
percent_sent = 0.5
|
||||
body.append("""
|
||||
<li>
|
||||
<label>Data transfer</label>
|
||||
<ul class='graph graph-stacked'>
|
||||
<li style='width: {percent_recv:.0%}' class='received back-yellow' title="Received bytes"></li>
|
||||
<li style='width: {percent_sent:.0%}' class='sent back-green' title="Sent bytes"></li>
|
||||
</ul>
|
||||
<ul class='graph-legend'>
|
||||
<li class='color-yellow'><span>Received:</span><b>{recv:.2f}MB</b></li>
|
||||
<li class='color-green'<span>Sent:</span><b>{sent:.2f}MB</b></li>
|
||||
</ul>
|
||||
</li>
|
||||
""".format(**locals()))
|
||||
|
||||
def sidebarRenderFileStats(self, body, site):
|
||||
body.append("<li><label>Files</label><ul class='graph graph-stacked'>")
|
||||
|
||||
extensions = (
|
||||
("html", "yellow"),
|
||||
("css", "orange"),
|
||||
("js", "purple"),
|
||||
("image", "green"),
|
||||
("json", "blue"),
|
||||
("other", "white"),
|
||||
("total", "black")
|
||||
)
|
||||
# Collect stats
|
||||
size_filetypes = {}
|
||||
size_total = 0
|
||||
for content in site.content_manager.contents.values():
|
||||
if "files" not in content:
|
||||
continue
|
||||
for file_name, file_details in content["files"].items():
|
||||
size_total += file_details["size"]
|
||||
ext = file_name.split(".")[-1]
|
||||
size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]
|
||||
size_other = size_total
|
||||
|
||||
# Bar
|
||||
for extension, color in extensions:
|
||||
if extension == "total":
|
||||
continue
|
||||
if extension == "other":
|
||||
size = size_other
|
||||
elif extension == "image":
|
||||
size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0)
|
||||
size_other -= size
|
||||
else:
|
||||
size = size_filetypes.get(extension, 0)
|
||||
size_other -= size
|
||||
percent = 100 * (float(size) / size_total)
|
||||
body.append(u"<li style='width: %.2f%%' class='%s back-%s' title='%s'></li>" % (percent, extension, color, extension))
|
||||
|
||||
# Legend
|
||||
body.append("</ul><ul class='graph-legend'>")
|
||||
for extension, color in extensions:
|
||||
if extension == "other":
|
||||
size = size_other
|
||||
elif extension == "image":
|
||||
size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0)
|
||||
elif extension == "total":
|
||||
size = size_total
|
||||
else:
|
||||
size = size_filetypes.get(extension, 0)
|
||||
|
||||
if extension == "js":
|
||||
title = "javascript"
|
||||
else:
|
||||
title = extension
|
||||
|
||||
if size > 1024 * 1024 * 10: # Format as mB is more than 10mB
|
||||
size_formatted = "%.0fMB" % (size / 1024 / 1024)
|
||||
else:
|
||||
size_formatted = "%.0fkB" % (size / 1024)
|
||||
|
||||
body.append(u"<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, title, size_formatted))
|
||||
|
||||
body.append("</ul></li>")
|
||||
|
||||
def getFreeSpace(self):
|
||||
free_space = 0
|
||||
if "statvfs" in dir(os): # Unix
|
||||
statvfs = os.statvfs(config.data_dir)
|
||||
free_space = statvfs.f_frsize * statvfs.f_bavail
|
||||
else: # Windows
|
||||
try:
|
||||
import ctypes
|
||||
free_space_pointer = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer)
|
||||
)
|
||||
free_space = free_space_pointer.value
|
||||
except Exception, err:
|
||||
self.log.debug("GetFreeSpace error: %s" % err)
|
||||
return free_space
|
||||
|
||||
def sidebarRenderSizeLimit(self, body, site):
|
||||
free_space = self.getFreeSpace() / 1024 / 1024
|
||||
size = float(site.settings["size"]) / 1024 / 1024
|
||||
size_limit = site.getSizeLimit()
|
||||
percent_used = size / size_limit
|
||||
body.append("""
|
||||
<li>
|
||||
<label>Size limit <small>(limit used: {percent_used:.0%}, free space: {free_space:,d}MB)</small></label>
|
||||
<input type='text' class='text text-num' value='{size_limit}' id='input-sitelimit'/><span class='text-post'>MB</span>
|
||||
<a href='#Set' class='button' id='button-sitelimit'>Set</a>
|
||||
</li>
|
||||
""".format(**locals()))
|
||||
|
||||
|
||||
def sidebarRenderOptionalFileStats(self, body, site):
    """Render the optional-files download graph into `body`.

    Returns True when the site declares optional files, False otherwise
    (in which case nothing is rendered).
    """
    size_total = 0.0
    size_downloaded = 0.0
    hashfield = site.content_manager.hashfield
    for content in site.content_manager.contents.values():
        if "files_optional" in content:
            for file_details in content["files_optional"].values():
                file_size = file_details["size"]
                size_total += file_size
                # Count a file as downloaded when we hold its hash
                if hashfield.hasHash(file_details["sha512"]):
                    size_downloaded += file_size

    if not size_total:
        return False

    percent_downloaded = size_downloaded / size_total
    size_formatted_total = size_total / 1024 / 1024
    size_formatted_downloaded = size_downloaded / 1024 / 1024

    body.append("""
        <li>
            <label>Optional files</label>
            <ul class='graph'>
                <li style='width: 100%' class='total back-black' title="Total size"></li>
                <li style='width: {percent_downloaded:.0%}' class='connected back-green' title='Downloaded files'></li>
            </ul>
            <ul class='graph-legend'>
                <li class='color-green'><span>Downloaded:</span><b>{size_formatted_downloaded:.2f}MB</b></li>
                <li class='color-black'><span>Total:</span><b>{size_formatted_total:.2f}MB</b></li>
            </ul>
        </li>
    """.format(**locals()))

    return True
|
||||
def sidebarRenderOptionalFileSettings(self, body, site):
    """Render the auto-download-optional-files checkbox row into `body`."""
    # Pre-check the box when the setting is already enabled
    checked = "checked='checked'" if self.site.settings.get("autodownloadoptional") else ""
    body.append("""
        <li>
            <label>Download and help distribute all files</label>
            <input type="checkbox" class="checkbox" id="checkbox-autodownloadoptional" {checked}/><div class="checkbox-skin"></div>
        </li>
    """.format(**locals()))
|
||||
def sidebarRenderDbOptions(self, body, site):
    """Render the database info row into `body`; returns False when the site has no db."""
    db = site.storage.db
    if not db:
        return False

    inner_path = site.storage.getInnerPath(db.db_path)
    size = site.storage.getSize(inner_path) / 1024.0
    body.append(u"""
        <li>
            <label>Database <small>({size:.2f}kB)</small></label>
            <input type='text' class='text disabled' value='{inner_path}' disabled='disabled'/>
            <a href='#Reindex' class='button' style='display: none'>Reindex</a>
        </li>
    """.format(**locals()))
|
||||
def sidebarRenderIdentity(self, body, site):
    """Render the user's auth (identity) address row with a Change button."""
    auth_address = self.user.getAuthAddress(self.site.address)
    row = """
        <li>
            <label>Identity address</label>
            <span class='input text disabled'>{auth_address}</span>
            <a href='#Change' class='button' id='button-identity'>Change</a>
        </li>
    """.format(auth_address=auth_address)
    body.append(row)
|
||||
def sidebarRenderOwnedCheckbox(self, body, site):
    """Render the "This is my site" ownership toggle into `body`."""
    checked = "checked='checked'" if self.site.settings["own"] else ""

    body.append("""
        <h2 class='owned-title'>This is my site</h2>
        <input type="checkbox" class="checkbox" id="checkbox-owned" {checked}/><div class="checkbox-skin"></div>
    """.format(**locals()))
|
||||
def sidebarRenderOwnSettings(self, body, site):
    """Render editable site title, description and (hidden) private key fields."""
    content_json = site.content_manager.contents["content.json"]
    # quote=True: the values land inside double-quoted html attributes
    title = cgi.escape(content_json["title"], True)
    description = cgi.escape(content_json["description"], True)
    site_data = self.user.getSiteData(site.address, create=False)
    privatekey = cgi.escape(site_data.get("privatekey", ""))

    body.append(u"""
        <li>
            <label for='settings-title'>Site title</label>
            <input type='text' class='text' value="{title}" id='settings-title'/>
        </li>

        <li>
            <label for='settings-description'>Site description</label>
            <input type='text' class='text' value="{description}" id='settings-description'/>
        </li>

        <li style='display: none'>
            <label>Private key</label>
            <input type='text' class='text long' value="{privatekey}" placeholder='[Ask on signing]'/>
        </li>

        <li>
            <a href='#Save' class='button' id='button-settings'>Save site settings</a>
        </li>
    """.format(**locals()))
|
||||
def sidebarRenderContents(self, body, site):
    """Render the content.json selector with Sign/Publish buttons into `body`."""
    body.append("""
        <li>
            <label>Content publishing</label>
            <select id='select-contents'>
    """)

    # One <option> per known content file, in sorted order
    body.extend(
        u"<option>%s</option>" % cgi.escape(inner_path, True)
        for inner_path in sorted(site.content_manager.contents.keys())
    )

    body.append("""
            </select>
            <span class='select-down'>›</span>
            <a href='#Sign' class='button' id='button-sign'>Sign</a>
            <a href='#Publish' class='button' id='button-publish'>Publish</a>
        </li>
    """)
|
||||
def actionSidebarGetHtmlTag(self, to):
    """Assemble the full sidebar html for the current site and send it to the client."""
    site = self.site

    # Header: title plus the (initially loading) globe placeholder
    body = [
        "<div>",
        "<h1>%s</h1>" % site.content_manager.contents["content.json"]["title"],
        "<div class='globe loading'></div>",
        "<ul class='fields'>",
    ]

    # Statistics / settings sections, in display order
    self.sidebarRenderPeerStats(body, site)
    self.sidebarRenderTransferStats(body, site)
    self.sidebarRenderFileStats(body, site)
    self.sidebarRenderSizeLimit(body, site)
    # Optional-file settings only make sense when the site has optional files
    if self.sidebarRenderOptionalFileStats(body, site):
        self.sidebarRenderOptionalFileSettings(body, site)
    self.sidebarRenderDbOptions(body, site)
    self.sidebarRenderIdentity(body, site)

    # Owner-only settings block
    self.sidebarRenderOwnedCheckbox(body, site)
    body.append("<div class='settings-owned'>")
    self.sidebarRenderOwnSettings(body, site)
    self.sidebarRenderContents(body, site)
    body.append("</div>")
    body.append("</ul>")
    body.append("</div>")

    self.response(to, "".join(body))
|
||||
def downloadGeoLiteDb(self, db_path):
    """Download and unpack the GeoLite2 City GeoIP database to `db_path`.

    Shows progress notifications on the UI. Re-raises any download or
    unpack error after notifying the user.
    """
    import urllib
    import gzip
    import shutil

    self.log.info("Downloading GeoLite2 City database...")
    self.cmd("notification", ["geolite-info", "Downloading GeoLite2 City database (one time only, ~15MB)...", 0])
    try:
        # Download the gzipped database into memory in 16kB chunks
        file = urllib.urlopen("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz")
        data = StringIO.StringIO()
        while True:
            buff = file.read(1024 * 16)
            if not buff:
                break
            data.write(buff)
        self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell())
        data.seek(0)

        # Unpack. Fix: the original leaked the open(db_path, "wb") handle;
        # `with` guarantees it is closed (and flushed) even on error.
        with gzip.GzipFile(fileobj=data) as gzip_file:
            with open(db_path, "wb") as out_file:
                shutil.copyfileobj(gzip_file, out_file)

        self.cmd("notification", ["geolite-done", "GeoLite2 City database downloaded!", 5000])
        time.sleep(2)  # Wait for notify animation
    except Exception as err:  # Fix: py2-only `except X, err` syntax
        self.cmd("notification", ["geolite-error", "GeoLite2 City database download error: %s!" % err, 0])
        raise  # Bare raise preserves the original traceback (raise err truncated it)
|
||||
def actionSidebarGetPeers(self, to):
    """Respond with flat [lat, lon, height, ...] bar data of the site's peers for the globe.

    Requires ADMIN permission. Downloads the GeoLite2 database on first
    use; responds with {"error": err} on any failure.
    """
    permissions = self.getPermissions(to)
    if "ADMIN" not in permissions:
        return self.response(to, "You don't have permission to run this command")
    try:
        import maxminddb
        db_path = config.data_dir + '/GeoLite2-City.mmdb'
        if not os.path.isfile(db_path):
            self.downloadGeoLiteDb(db_path)
        geodb = maxminddb.open_database(db_path)

        peers = self.site.peers.values()
        # Find avg ping. Fix: the condition tested last_ping_delay twice
        # (`... and peer.connection.last_ping_delay and peer.connection.last_ping_delay`).
        ping_times = [
            peer.connection.last_ping_delay
            for peer in peers
            if peer.connection and peer.connection.last_ping_delay
        ]
        if ping_times:
            ping_avg = sum(ping_times) / float(len(ping_times))
        else:
            ping_avg = 0
        # Place bars
        globe_data = []
        placed = {}  # Already placed bars here
        for peer in peers:
            # Height of bar: ping relative to the average, capped at 0.20
            if peer.connection and peer.connection.last_ping_delay:
                ping = min(0.20, math.log(1 + peer.connection.last_ping_delay / ping_avg, 300))
            else:
                ping = -0.03  # No ping info: draw slightly below the surface

            # Query and cache location
            if peer.ip in loc_cache:
                loc = loc_cache[peer.ip]
            else:
                loc = geodb.get(peer.ip)
                loc_cache[peer.ip] = loc
            if not loc or "location" not in loc:
                continue

            # Create position array
            lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"])
            latlon = "%s,%s" % (lat, lon)
            if latlon in placed:  # Dont place more than 1 bar to same place, fake repos using ip address last two part
                lat += float(128 - int(peer.ip.split(".")[-2])) / 50
                lon += float(128 - int(peer.ip.split(".")[-1])) / 50
                latlon = "%s,%s" % (lat, lon)
            placed[latlon] = True

            globe_data += (lat, lon, ping)
        # Append myself
        loc = geodb.get(config.ip_external)
        if loc:
            lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"])
            globe_data += (lat, lon, -0.135)

        self.response(to, globe_data)
    except Exception as err:  # Fix: py2-only `except X, err` syntax
        self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err))
        self.response(to, {"error": err})
|
||||
def actionSiteSetOwned(self, to, owned):
    """Set the site's "owned by me" flag (ADMIN permission required)."""
    if "ADMIN" not in self.getPermissions(to):
        return self.response(to, "You don't have permission to run this command")
    self.site.settings["own"] = bool(owned)
|
||||
def actionSiteSetAutodownloadoptional(self, to, owned):
    """Toggle automatic download of optional files, then re-check tasks (ADMIN only)."""
    if "ADMIN" not in self.getPermissions(to):
        return self.response(to, "You don't have permission to run this command")
    site = self.site
    site.settings["autodownloadoptional"] = bool(owned)
    site.update()
    site.worker_manager.removeGoodFileTasks()
|
|
@ -1 +0,0 @@
|
|||
import SidebarPlugin
|
|
@ -1,46 +0,0 @@
|
|||
# pylint:disable=C0111
|
||||
import os
|
||||
|
||||
import maxminddb.reader
|
||||
|
||||
try:
|
||||
import maxminddb.extension
|
||||
except ImportError:
|
||||
maxminddb.extension = None
|
||||
|
||||
from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
|
||||
MODE_MEMORY)
|
||||
from maxminddb.decoder import InvalidDatabaseError
|
||||
|
||||
|
||||
def open_database(database, mode=MODE_AUTO):
    """Open a Maxmind DB database

    Arguments:
    database -- A path to a valid MaxMind DB file such as a GeoIP2
                database file.
    mode -- mode to open the database with. Valid mode are:
            * MODE_MMAP_EXT - use the C extension with memory map.
            * MODE_MMAP - read from memory map. Pure Python.
            * MODE_FILE - read database as standard file. Pure Python.
            * MODE_MEMORY - load database into memory. Pure Python.
            * MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that
                          order. Default mode.
    """
    # Prefer the C extension when it is available (or explicitly requested)
    has_extension = bool(maxminddb.extension) and hasattr(maxminddb.extension, 'Reader')
    if mode == MODE_MMAP_EXT or (mode == MODE_AUTO and has_extension):
        return maxminddb.extension.Reader(database)
    if mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY):
        return maxminddb.reader.Reader(database, mode)
    raise ValueError('Unsupported open mode: {0}'.format(mode))
||||
def Reader(database):  # pylint: disable=invalid-name
    """This exists for backwards compatibility. Use open_database instead"""
    # Deprecated alias: delegates to open_database with the default MODE_AUTO.
    return open_database(database)
||||
# Package metadata for the vendored maxminddb library.
__title__ = 'maxminddb'
__version__ = '1.2.0'
__author__ = 'Gregory Oschwald'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2014 Maxmind, Inc.'
|
|
@ -1,28 +0,0 @@
|
|||
import sys
|
||||
|
||||
# pylint: skip-file
|
||||
|
||||
# Python 2 / Python 3 compatibility shims used by the reader and decoder.
if sys.version_info[0] == 2:
    import ipaddr as ipaddress  # pylint:disable=F0401
    # ipaddr's constructor has a different name than py3's ipaddress;
    # alias it so callers can use ipaddress.ip_address on both versions.
    ipaddress.ip_address = ipaddress.IPAddress

    # On py2, indexing a byte string yields a 1-char str; ord() gives the int.
    int_from_byte = ord

    # py2 has no FileNotFoundError builtin.
    FileNotFoundError = IOError

    def int_from_bytes(b):
        # Big-endian bytes -> int via hex encoding; empty input decodes to 0.
        if b:
            return int(b.encode("hex"), 16)
        return 0

    # Single int -> 1-byte string.
    byte_from_int = chr
else:
    import ipaddress  # pylint:disable=F0401

    # On py3, indexing bytes already yields an int.
    int_from_byte = lambda x: x

    # Builtin on py3; re-exported so both branches define the same names.
    FileNotFoundError = FileNotFoundError

    # Big-endian bytes -> int using the native int.from_bytes.
    int_from_bytes = lambda x: int.from_bytes(x, 'big')

    # Single int -> length-1 bytes object.
    byte_from_int = lambda x: bytes([x])
|
@ -1,7 +0,0 @@
|
|||
"""Constants used in the API"""
|
||||
|
||||
MODE_AUTO = 0
|
||||
MODE_MMAP_EXT = 1
|
||||
MODE_MMAP = 2
|
||||
MODE_FILE = 4
|
||||
MODE_MEMORY = 8
|
|
@ -1,173 +0,0 @@
|
|||
"""
|
||||
maxminddb.decoder
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
This package contains code for decoding the MaxMind DB data section.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import struct
|
||||
|
||||
from maxminddb.compat import byte_from_int, int_from_bytes
|
||||
from maxminddb.errors import InvalidDatabaseError
|
||||
|
||||
|
||||
class Decoder(object):  # pylint: disable=too-few-public-methods

    """Decoder for the data section of the MaxMind DB"""

    def __init__(self, database_buffer, pointer_base=0, pointer_test=False):
        """Created a Decoder for a MaxMind DB

        Arguments:
        database_buffer -- an mmap'd MaxMind DB file.
        pointer_base -- the base number to use when decoding a pointer
        pointer_test -- used for internal unit testing of pointer code
        """
        self._pointer_test = pointer_test
        self._buffer = database_buffer
        self._pointer_base = pointer_base

    def _decode_array(self, size, offset):
        # Decode `size` consecutive values; `offset` advances past each one.
        array = []
        for _ in range(size):
            (value, offset) = self.decode(offset)
            array.append(value)
        return array, offset

    def _decode_boolean(self, size, offset):
        # Booleans are encoded in the control byte itself: size 0 = False.
        return size != 0, offset

    def _decode_bytes(self, size, offset):
        new_offset = offset + size
        return self._buffer[offset:new_offset], new_offset

    # pylint: disable=no-self-argument
    # |-> I am open to better ways of doing this as long as it doesn't involve
    # lots of code duplication.
    def _decode_packed_type(type_code, type_size, pad=False):
        # Factory run at class-definition time: builds a decoder for a
        # fixed-size struct-packed type (double/float/int32). `type_code`
        # and `type_size` are captured in the closure — hence no `self`.
        # pylint: disable=protected-access, missing-docstring
        def unpack_type(self, size, offset):
            if not pad:
                self._verify_size(size, type_size)
            new_offset = offset + type_size
            packed_bytes = self._buffer[offset:new_offset]
            if pad:
                # Left-pad short encodings with zero bytes (e.g. int32).
                packed_bytes = packed_bytes.rjust(type_size, b'\x00')
            (value,) = struct.unpack(type_code, packed_bytes)
            return value, new_offset
        return unpack_type

    def _decode_map(self, size, offset):
        # Maps are `size` key/value pairs decoded back-to-back.
        container = {}
        for _ in range(size):
            (key, offset) = self.decode(offset)
            (value, offset) = self.decode(offset)
            container[key] = value
        return container, offset

    # Value added to a pointer depending on how many bytes encode it
    # (per the MaxMind DB format specification).
    _pointer_value_offset = {
        1: 0,
        2: 2048,
        3: 526336,
        4: 0,
    }

    def _decode_pointer(self, size, offset):
        # Pointer width (1-4 bytes) is stored in bits 3-4 of `size`.
        pointer_size = ((size >> 3) & 0x3) + 1
        new_offset = offset + pointer_size
        pointer_bytes = self._buffer[offset:new_offset]
        # For 1-3 byte pointers the low 3 bits of `size` are the high bits
        # of the pointer value; 4-byte pointers use the bytes as-is.
        packed = pointer_bytes if pointer_size == 4 else struct.pack(
            b'!c', byte_from_int(size & 0x7)) + pointer_bytes
        unpacked = int_from_bytes(packed)
        pointer = unpacked + self._pointer_base + \
            self._pointer_value_offset[pointer_size]
        if self._pointer_test:
            # Unit tests want the raw pointer, not the dereferenced value.
            return pointer, new_offset
        (value, _) = self.decode(pointer)
        return value, new_offset

    def _decode_uint(self, size, offset):
        # Variable-width big-endian unsigned int (uint16/32/64/128).
        new_offset = offset + size
        uint_bytes = self._buffer[offset:new_offset]
        return int_from_bytes(uint_bytes), new_offset

    def _decode_utf8_string(self, size, offset):
        new_offset = offset + size
        return self._buffer[offset:new_offset].decode('utf-8'), new_offset

    # Dispatch table: MaxMind DB type number -> decoder method.
    _type_decoder = {
        1: _decode_pointer,
        2: _decode_utf8_string,
        3: _decode_packed_type(b'!d', 8),  # double,
        4: _decode_bytes,
        5: _decode_uint,  # uint16
        6: _decode_uint,  # uint32
        7: _decode_map,
        8: _decode_packed_type(b'!i', 4, pad=True),  # int32
        9: _decode_uint,  # uint64
        10: _decode_uint,  # uint128
        11: _decode_array,
        14: _decode_boolean,
        15: _decode_packed_type(b'!f', 4),  # float,
    }

    def decode(self, offset):
        """Decode a section of the data section starting at offset

        Arguments:
        offset -- the location of the data structure to decode
        """
        new_offset = offset + 1
        (ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
        # Top 3 bits of the control byte are the type number.
        type_num = ctrl_byte >> 5
        # Extended type
        if not type_num:
            (type_num, new_offset) = self._read_extended(new_offset)

        if not type_num in self._type_decoder:
            raise InvalidDatabaseError('Unexpected type number ({type}) '
                                       'encountered'.format(type=type_num))

        (size, new_offset) = self._size_from_ctrl_byte(
            ctrl_byte, new_offset, type_num)
        return self._type_decoder[type_num](self, size, new_offset)

    def _read_extended(self, offset):
        # Extended types store (actual_type - 7) in the byte following the
        # control byte.
        (next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1])
        type_num = next_byte + 7
        if type_num < 7:
            raise InvalidDatabaseError(
                'Something went horribly wrong in the decoder. An '
                'extended type resolved to a type number < 8 '
                '({type})'.format(type=type_num))
        return type_num, offset + 1

    def _verify_size(self, expected, actual):
        # Sanity check for fixed-size types: size field must match.
        if expected != actual:
            raise InvalidDatabaseError(
                'The MaxMind DB file\'s data section contains bad data '
                '(unknown data type or corrupt data)'
            )

    def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num):
        # The low 5 bits of the control byte encode the payload size;
        # values 29-31 mean the real size follows in 1-3 extra bytes.
        size = ctrl_byte & 0x1f
        if type_num == 1:
            # Pointers keep the raw 5-bit value (width bits + high bits).
            return size, offset
        bytes_to_read = 0 if size < 29 else size - 28

        new_offset = offset + bytes_to_read
        size_bytes = self._buffer[offset:new_offset]

        # Using unpack rather than int_from_bytes as it is about 200 lookups
        # per second faster here.
        if size == 29:
            size = 29 + struct.unpack(b'!B', size_bytes)[0]
        elif size == 30:
            size = 285 + struct.unpack(b'!H', size_bytes)[0]
        elif size > 30:
            size = struct.unpack(
                b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821

        return size, new_offset
|
|
@ -1,11 +0,0 @@
|
|||
"""
|
||||
maxminddb.errors
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains custom errors for the MaxMind DB reader
|
||||
"""
|
||||
|
||||
|
||||
# Raised by the reader/decoder when the MaxMind DB file is corrupt or
# structurally invalid.
class InvalidDatabaseError(RuntimeError):

    """This error is thrown when unexpected data is found in the database."""
|
@ -1,570 +0,0 @@
|
|||
#include <Python.h>
|
||||
#include <maxminddb.h>
|
||||
#include "structmember.h"
|
||||
|
||||
#define __STDC_FORMAT_MACROS
|
||||
#include <inttypes.h>
|
||||
|
||||
/* Forward declarations for the module's two types and its exception object. */
static PyTypeObject Reader_Type;
static PyTypeObject Metadata_Type;
static PyObject *MaxMindDB_error;

/* Python object wrapping a libmaxminddb MMDB_s handle. */
typedef struct {
    PyObject_HEAD /* no semicolon */
    MMDB_s *mmdb;  /* set to NULL once the reader has been closed */
} Reader_obj;

/* Python object holding the database metadata fields as PyObjects. */
typedef struct {
    PyObject_HEAD /* no semicolon */
    PyObject *binary_format_major_version;
    PyObject *binary_format_minor_version;
    PyObject *build_epoch;
    PyObject *database_type;
    PyObject *description;
    PyObject *ip_version;
    PyObject *languages;
    PyObject *node_count;
    PyObject *record_size;
} Metadata_obj;

/* Recursive conversion helpers: MMDB entry data list -> Python objects. */
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list);
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list);
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list);
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list);
|
||||
/* Python 2/3 compatibility shims: module-init signature/return, the long
 * constructor name, and which exception to raise for a missing file. */
#if PY_MAJOR_VERSION >= 3
    #define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void)
    #define RETURN_MOD_INIT(m) return (m)
    #define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError
#else
    #define MOD_INIT(name) PyMODINIT_FUNC init ## name(void)
    #define RETURN_MOD_INIT(m) return
    #define PyInt_FromLong PyLong_FromLong
    #define FILE_NOT_FOUND_ERROR PyExc_IOError
#endif

/* Mark an intentionally-unused parameter (silences GCC warnings). */
#ifdef __GNUC__
    # define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
#else
    # define UNUSED(x) UNUSED_ ## x
#endif
|
||||
|
||||
/* Reader.__init__(database, mode=0): open a MaxMind DB file.
 * Only MODE_AUTO (0) and MODE_MMAP_EXT (1) are accepted; both map the
 * file with MMDB_MODE_MMAP. Returns 0 on success, -1 with a Python
 * exception set on failure. */
static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    char *filename;
    int mode = 0;

    static char *kwlist[] = {"database", "mode", NULL};
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) {
        return -1;
    }

    if (mode != 0 && mode != 1) {
        PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). Only "
                     "MODE_AUTO and MODE_MMAP_EXT are supported by this extension.",
                     mode);
        return -1;
    }

    /* Pre-check readability so a missing file raises FileNotFoundError /
     * IOError instead of a generic open error from libmaxminddb. */
    if (0 != access(filename, R_OK)) {
        PyErr_Format(FILE_NOT_FOUND_ERROR,
                     "No such file or directory: '%s'",
                     filename);
        return -1;
    }

    MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s));
    if (NULL == mmdb) {
        PyErr_NoMemory();
        return -1;
    }

    Reader_obj *mmdb_obj = (Reader_obj *)self;
    if (!mmdb_obj) {
        free(mmdb);
        PyErr_NoMemory();
        return -1;
    }

    uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb);

    if (MMDB_SUCCESS != status) {
        free(mmdb);
        PyErr_Format(
            MaxMindDB_error,
            "Error opening database file (%s). Is this a valid MaxMind DB file?",
            filename
        );
        return -1;
    }

    /* NOTE(review): calling __init__ a second time on the same object
     * overwrites any previous mmdb handle without closing it — looks like
     * a leak; confirm whether re-init is reachable from Python. */
    mmdb_obj->mmdb = mmdb;
    return 0;
}
|
||||
|
||||
/* Reader.get(ip_address): look up an IP address string and return the
 * decoded record as Python objects, None when the address is not in the
 * database, or NULL with an exception set on error. */
static PyObject *Reader_get(PyObject *self, PyObject *args)
{
    char *ip_address = NULL;

    Reader_obj *mmdb_obj = (Reader_obj *)self;
    if (!PyArg_ParseTuple(args, "s", &ip_address)) {
        return NULL;
    }

    MMDB_s *mmdb = mmdb_obj->mmdb;

    /* close() sets mmdb to NULL; reject lookups afterwards. */
    if (NULL == mmdb) {
        PyErr_SetString(PyExc_ValueError,
                        "Attempt to read from a closed MaxMind DB.");
        return NULL;
    }

    int gai_error = 0;
    int mmdb_error = MMDB_SUCCESS;
    MMDB_lookup_result_s result =
        MMDB_lookup_string(mmdb, ip_address, &gai_error,
                           &mmdb_error);

    /* getaddrinfo failed: the string was not a parseable IP address. */
    if (0 != gai_error) {
        PyErr_Format(PyExc_ValueError,
                     "'%s' does not appear to be an IPv4 or IPv6 address.",
                     ip_address);
        return NULL;
    }

    if (MMDB_SUCCESS != mmdb_error) {
        PyObject *exception;
        /* An IPv6 lookup in an IPv4-only database is a caller error;
         * anything else is a database problem. */
        if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) {
            exception = PyExc_ValueError;
        } else {
            exception = MaxMindDB_error;
        }
        PyErr_Format(exception, "Error looking up %s. %s",
                     ip_address, MMDB_strerror(mmdb_error));
        return NULL;
    }

    /* Address not present in the database -> None (not an error). */
    if (!result.found_entry) {
        Py_RETURN_NONE;
    }

    MMDB_entry_data_list_s *entry_data_list = NULL;
    int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list);
    if (MMDB_SUCCESS != status) {
        PyErr_Format(MaxMindDB_error,
                     "Error while looking up data for %s. %s",
                     ip_address, MMDB_strerror(status));
        MMDB_free_entry_data_list(entry_data_list);
        return NULL;
    }

    /* from_entry_data_list advances the cursor; keep the head so the
     * whole list can be freed afterwards. */
    MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;
    PyObject *py_obj = from_entry_data_list(&entry_data_list);
    MMDB_free_entry_data_list(original_entry_data_list);
    return py_obj;
}
|
||||
|
||||
/* Reader.metadata(): build and return a Metadata object describing the
 * open database, or NULL with an exception set. */
static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args))
{
    Reader_obj *mmdb_obj = (Reader_obj *)self;

    if (NULL == mmdb_obj->mmdb) {
        PyErr_SetString(PyExc_IOError,
                        "Attempt to read from a closed MaxMind DB.");
        return NULL;
    }

    MMDB_entry_data_list_s *entry_data_list;
    MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list);
    MMDB_entry_data_list_s *original_entry_data_list = entry_data_list;

    PyObject *metadata_dict = from_entry_data_list(&entry_data_list);
    MMDB_free_entry_data_list(original_entry_data_list);
    if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) {
        /* Fix: the original leaked metadata_dict when it was non-NULL
         * but not a dict. Py_XDECREF handles both branches. */
        Py_XDECREF(metadata_dict);
        PyErr_SetString(MaxMindDB_error,
                        "Error decoding metadata.");
        return NULL;
    }

    PyObject *args = PyTuple_New(0);
    if (NULL == args) {
        Py_DECREF(metadata_dict);
        return NULL;
    }

    /* Equivalent of Metadata(**metadata_dict). */
    PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args,
                                       metadata_dict);

    /* Fix: the original never released `args`, leaking the empty tuple
     * on every metadata() call. */
    Py_DECREF(args);
    Py_DECREF(metadata_dict);
    return metadata;
}
|
||||
|
||||
/* Reader.close(): release the underlying MMDB handle.
 * Safe to call more than once; subsequent calls are no-ops. */
static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args))
{
    Reader_obj *reader = (Reader_obj *)self;
    MMDB_s *handle = reader->mmdb;

    if (handle != NULL) {
        reader->mmdb = NULL;  /* mark closed before freeing */
        MMDB_close(handle);
        free(handle);
    }

    Py_RETURN_NONE;
}
|
||||
|
||||
/* Destructor: make sure the database handle is released, then free the
 * Python object itself. */
static void Reader_dealloc(PyObject *self)
{
    Reader_obj *reader = (Reader_obj *)self;
    if (reader->mmdb != NULL) {
        /* Reuse close() so the cleanup logic lives in one place. */
        Reader_close(self, NULL);
    }

    PyObject_Del(self);
}
|
||||
|
||||
/* Metadata.__init__(**fields): store the metadata fields on the object.
 * Returns 0 on success, -1 with an exception set on failure. */
static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* Fix: the "|OOOOOOOOO" format makes every argument optional, but
     * these pointers were previously uninitialized — any field the
     * caller omitted left a garbage pointer that was then Py_INCREF'd,
     * corrupting memory. Initialize to NULL and default to Py_None
     * below so Metadata_dealloc can always Py_DECREF every slot. */
    PyObject
    *binary_format_major_version = NULL,
    *binary_format_minor_version = NULL,
    *build_epoch = NULL,
    *database_type = NULL,
    *description = NULL,
    *ip_version = NULL,
    *languages = NULL,
    *node_count = NULL,
    *record_size = NULL;

    static char *kwlist[] = {
        "binary_format_major_version",
        "binary_format_minor_version",
        "build_epoch",
        "database_type",
        "description",
        "ip_version",
        "languages",
        "node_count",
        "record_size",
        NULL
    };

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist,
                                     &binary_format_major_version,
                                     &binary_format_minor_version,
                                     &build_epoch,
                                     &database_type,
                                     &description,
                                     &ip_version,
                                     &languages,
                                     &node_count,
                                     &record_size)) {
        return -1;
    }

    Metadata_obj *obj = (Metadata_obj *)self;

    /* Substitute Py_None for any missing field so every slot holds a
     * valid, owned reference. */
    obj->binary_format_major_version =
        binary_format_major_version ? binary_format_major_version : Py_None;
    obj->binary_format_minor_version =
        binary_format_minor_version ? binary_format_minor_version : Py_None;
    obj->build_epoch = build_epoch ? build_epoch : Py_None;
    obj->database_type = database_type ? database_type : Py_None;
    obj->description = description ? description : Py_None;
    obj->ip_version = ip_version ? ip_version : Py_None;
    obj->languages = languages ? languages : Py_None;
    obj->node_count = node_count ? node_count : Py_None;
    obj->record_size = record_size ? record_size : Py_None;

    Py_INCREF(obj->binary_format_major_version);
    Py_INCREF(obj->binary_format_minor_version);
    Py_INCREF(obj->build_epoch);
    Py_INCREF(obj->database_type);
    Py_INCREF(obj->description);
    Py_INCREF(obj->ip_version);
    Py_INCREF(obj->languages);
    Py_INCREF(obj->node_count);
    Py_INCREF(obj->record_size);

    return 0;
}
|
||||
|
||||
/* Destructor: drop the references taken in Metadata_init, then free the
 * object. Assumes every slot holds a valid reference (Metadata_init is
 * responsible for guaranteeing that). */
static void Metadata_dealloc(PyObject *self)
{
    Metadata_obj *obj = (Metadata_obj *)self;
    Py_DECREF(obj->binary_format_major_version);
    Py_DECREF(obj->binary_format_minor_version);
    Py_DECREF(obj->build_epoch);
    Py_DECREF(obj->database_type);
    Py_DECREF(obj->description);
    Py_DECREF(obj->ip_version);
    Py_DECREF(obj->languages);
    Py_DECREF(obj->node_count);
    Py_DECREF(obj->record_size);
    PyObject_Del(self);
}
|
||||
|
||||
/* Convert the entry at *entry_data_list into a Python object, advancing
 * the cursor past everything consumed (maps/arrays recurse). Returns a
 * new reference, or NULL with an exception set. */
static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list)
{
    if (NULL == entry_data_list || NULL == *entry_data_list) {
        PyErr_SetString(
            MaxMindDB_error,
            "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb."
        );
        return NULL;
    }

    switch ((*entry_data_list)->entry_data.type) {
    case MMDB_DATA_TYPE_MAP:
        return from_map(entry_data_list);
    case MMDB_DATA_TYPE_ARRAY:
        return from_array(entry_data_list);
    case MMDB_DATA_TYPE_UTF8_STRING:
        return PyUnicode_FromStringAndSize(
            (*entry_data_list)->entry_data.utf8_string,
            (*entry_data_list)->entry_data.data_size
        );
    case MMDB_DATA_TYPE_BYTES:
        return PyByteArray_FromStringAndSize(
            (const char *)(*entry_data_list)->entry_data.bytes,
            (Py_ssize_t)(*entry_data_list)->entry_data.data_size);
    case MMDB_DATA_TYPE_DOUBLE:
        return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value);
    case MMDB_DATA_TYPE_FLOAT:
        /* Python has no 32-bit float type; widen to double. */
        return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value);
    case MMDB_DATA_TYPE_UINT16:
        return PyLong_FromLong( (*entry_data_list)->entry_data.uint16);
    case MMDB_DATA_TYPE_UINT32:
        return PyLong_FromLong((*entry_data_list)->entry_data.uint32);
    case MMDB_DATA_TYPE_BOOLEAN:
        return PyBool_FromLong((*entry_data_list)->entry_data.boolean);
    case MMDB_DATA_TYPE_UINT64:
        return PyLong_FromUnsignedLongLong(
            (*entry_data_list)->entry_data.uint64);
    case MMDB_DATA_TYPE_UINT128:
        /* Too wide for any C integer type; built via a hex string. */
        return from_uint128(*entry_data_list);
    case MMDB_DATA_TYPE_INT32:
        return PyLong_FromLong((*entry_data_list)->entry_data.int32);
    default:
        PyErr_Format(MaxMindDB_error,
                     "Invalid data type arguments: %d",
                     (*entry_data_list)->entry_data.type);
        return NULL;
    }
    return NULL;  /* unreachable: every switch branch returns */
}
|
||||
|
||||
/* Convert a MaxMind DB map entry into a new Python dict, advancing the
 * cursor past all of its keys and values. Returns a new reference, or
 * NULL with an exception set. */
static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list)
{
    PyObject *py_obj = PyDict_New();
    if (NULL == py_obj) {
        PyErr_NoMemory();
        return NULL;
    }

    const uint32_t map_size = (*entry_data_list)->entry_data.data_size;

    uint i;
    // entry_data_list cannot start out NULL (see from_entry_data_list). We
    // check it in the loop because it may become NULL.
    // coverity[check_after_deref]
    for (i = 0; i < map_size && entry_data_list; i++) {
        *entry_data_list = (*entry_data_list)->next;

        /* Map keys are always UTF-8 strings in the MaxMind DB format. */
        PyObject *key = PyUnicode_FromStringAndSize(
            (char *)(*entry_data_list)->entry_data.utf8_string,
            (*entry_data_list)->entry_data.data_size
        );
        /* NOTE(review): `key` is not NULL-checked before use — an
         * allocation failure here would pass NULL to PyDict_SetItem;
         * confirm whether upstream ever fixed this. */

        *entry_data_list = (*entry_data_list)->next;

        PyObject *value = from_entry_data_list(entry_data_list);
        if (NULL == value) {
            Py_DECREF(key);
            Py_DECREF(py_obj);
            return NULL;
        }
        /* PyDict_SetItem does not steal references; drop ours. */
        PyDict_SetItem(py_obj, key, value);
        Py_DECREF(value);
        Py_DECREF(key);
    }

    return py_obj;
}
|
||||
|
||||
/* Convert a MaxMind DB array entry into a new Python list, advancing the
 * cursor past all of its elements. Returns a new reference, or NULL with
 * an exception set. */
static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list)
{
    const uint32_t size = (*entry_data_list)->entry_data.data_size;

    PyObject *py_obj = PyList_New(size);
    if (NULL == py_obj) {
        PyErr_NoMemory();
        return NULL;
    }

    uint i;
    // entry_data_list cannot start out NULL (see from_entry_data_list). We
    // check it in the loop because it may become NULL.
    // coverity[check_after_deref]
    for (i = 0; i < size && entry_data_list; i++) {
        *entry_data_list = (*entry_data_list)->next;
        PyObject *value = from_entry_data_list(entry_data_list);
        if (NULL == value) {
            Py_DECREF(py_obj);
            return NULL;
        }
        // PyList_SetItem 'steals' the reference
        PyList_SetItem(py_obj, i, value);
    }
    return py_obj;
}
|
||||
|
||||
static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list)
|
||||
{
|
||||
uint64_t high = 0;
|
||||
uint64_t low = 0;
|
||||
#if MMDB_UINT128_IS_BYTE_ARRAY
|
||||
int i;
|
||||
for (i = 0; i < 8; i++) {
|
||||
high = (high << 8) | entry_data_list->entry_data.uint128[i];
|
||||
}
|
||||
|
||||
for (i = 8; i < 16; i++) {
|
||||
low = (low << 8) | entry_data_list->entry_data.uint128[i];
|
||||
}
|
||||
#else
|
||||
high = entry_data_list->entry_data.uint128 >> 64;
|
||||
low = (uint64_t)entry_data_list->entry_data.uint128;
|
||||
#endif
|
||||
|
||||
char *num_str = malloc(33);
|
||||
if (NULL == num_str) {
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low);
|
||||
|
||||
PyObject *py_obj = PyLong_FromString(num_str, NULL, 16);
|
||||
|
||||
free(num_str);
|
||||
return py_obj;
|
||||
}
|
||||
|
||||
/* Methods exposed on Reader instances; tp_methods of Reader_Type. */
static PyMethodDef Reader_methods[] = {
    { "get", Reader_get, METH_VARARGS,
      "Get record for IP address" },
    { "metadata", Reader_metadata, METH_NOARGS,
      "Returns metadata object for database" },
    { "close", Reader_close, METH_NOARGS, "Closes database"},
    { NULL, NULL, 0, NULL }  /* sentinel */
};
|
||||
|
||||
/* Python type object for Reader. tp_new is filled in by MOD_INIT
 * (PyType_GenericNew) before PyType_Ready is called. */
static PyTypeObject Reader_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_basicsize = sizeof(Reader_obj),
    .tp_dealloc = Reader_dealloc,
    .tp_doc = "Reader object",
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_methods = Reader_methods,
    .tp_name = "Reader",
    .tp_init = Reader_init,
};
|
||||
|
||||
/* Metadata exposes no methods, only the read-only members below. */
static PyMethodDef Metadata_methods[] = {
    { NULL, NULL, 0, NULL }  /* sentinel */
};
|
||||
|
||||
/* *INDENT-OFF* */
/* Read-only attributes exposed on Metadata instances. Each entry maps a
 * Python attribute name to a PyObject* slot on Metadata_obj. */
static PyMemberDef Metadata_members[] = {
    { "binary_format_major_version", T_OBJECT, offsetof(
          Metadata_obj, binary_format_major_version), READONLY, NULL },
    { "binary_format_minor_version", T_OBJECT, offsetof(
          Metadata_obj, binary_format_minor_version), READONLY, NULL },
    { "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch),
      READONLY, NULL },
    { "database_type", T_OBJECT, offsetof(Metadata_obj, database_type),
      READONLY, NULL },
    { "description", T_OBJECT, offsetof(Metadata_obj, description),
      READONLY, NULL },
    { "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version),
      READONLY, NULL },
    { "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY,
      NULL },
    { "node_count", T_OBJECT, offsetof(Metadata_obj, node_count),
      READONLY, NULL },
    { "record_size", T_OBJECT, offsetof(Metadata_obj, record_size),
      READONLY, NULL },
    { NULL, 0, 0, 0, NULL }  /* sentinel */
};
/* *INDENT-ON* */
|
||||
|
||||
/* Python type object for Metadata. tp_new is filled in by MOD_INIT
 * (PyType_GenericNew) before PyType_Ready is called. */
static PyTypeObject Metadata_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_basicsize = sizeof(Metadata_obj),
    .tp_dealloc = Metadata_dealloc,
    .tp_doc = "Metadata object",
    .tp_flags = Py_TPFLAGS_DEFAULT,
    .tp_members = Metadata_members,
    .tp_methods = Metadata_methods,
    .tp_name = "Metadata",
    .tp_init = Metadata_init
};
|
||||
|
||||
/* The module itself exposes no functions, only the Reader/Metadata types
 * and the InvalidDatabaseError exception added in MOD_INIT. */
static PyMethodDef MaxMindDB_methods[] = {
    { NULL, NULL, 0, NULL }  /* sentinel */
};
|
||||
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
static struct PyModuleDef MaxMindDB_module = {
|
||||
PyModuleDef_HEAD_INIT,
|
||||
.m_name = "extension",
|
||||
.m_doc = "This is a C extension to read MaxMind DB file format",
|
||||
.m_methods = MaxMindDB_methods,
|
||||
};
|
||||
#endif
|
||||
|
||||
MOD_INIT(extension){
|
||||
PyObject *m;
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
m = PyModule_Create(&MaxMindDB_module);
|
||||
#else
|
||||
m = Py_InitModule("extension", MaxMindDB_methods);
|
||||
#endif
|
||||
|
||||
if (!m) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
Reader_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&Reader_Type)) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
Py_INCREF(&Reader_Type);
|
||||
PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type);
|
||||
|
||||
Metadata_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&Metadata_Type)) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type);
|
||||
|
||||
PyObject* error_mod = PyImport_ImportModule("maxminddb.errors");
|
||||
if (error_mod == NULL) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError");
|
||||
Py_DECREF(error_mod);
|
||||
|
||||
if (MaxMindDB_error == NULL) {
|
||||
RETURN_MOD_INIT(NULL);
|
||||
}
|
||||
|
||||
Py_INCREF(MaxMindDB_error);
|
||||
|
||||
/* We primarily add it to the module for backwards compatibility */
|
||||
PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error);
|
||||
|
||||
RETURN_MOD_INIT(m);
|
||||
}
|
|
@ -1,65 +0,0 @@
|
|||
"""For internal use only. It provides a slice-like file reader."""
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
from multiprocessing import Lock
|
||||
except ImportError:
|
||||
from threading import Lock
|
||||
|
||||
|
||||
class FileBuffer(object):

    """A slice-able reader over a binary file.

    Supports ``buf[a:b]`` and ``buf[i]`` (both return bytes), ``rfind``,
    ``size`` and ``close``. On platforms with ``os.pread`` reads are
    position-independent; elsewhere a lock serializes seek+read.
    """

    def __init__(self, database):
        self._handle = open(database, 'rb')
        self._size = os.fstat(self._handle.fileno()).st_size
        # Only the seek+read fallback needs a lock (see _read below).
        if not hasattr(os, 'pread'):
            self._lock = Lock()

    def __getitem__(self, key):
        if isinstance(key, slice):
            return self._read(key.stop - key.start, key.start)
        if isinstance(key, int):
            return self._read(1, key)
        raise TypeError("Invalid argument type.")

    def rfind(self, needle, start):
        """Reverse find needle from start"""
        haystack = self._read(self._size - start - 1, start)
        index = haystack.rfind(needle)
        return index if index == -1 else start + index

    def size(self):
        """Size of file"""
        return self._size

    def close(self):
        """Close file"""
        self._handle.close()

    if hasattr(os, 'pread'):

        def _read(self, nbytes, offset):
            """Read *nbytes* at *offset* via os.pread (no shared offset)."""
            # pylint: disable=no-member
            return os.pread(self._handle.fileno(), nbytes, offset)

    else:

        def _read(self, nbytes, offset):
            """Read *nbytes* at *offset* with seek+read under a lock.

            This lock is necessary as after a fork, the different processes
            will share the same file table entry, even if we dup the fd, and
            as such the same offsets. There does not appear to be a way to
            duplicate the file table entry and we cannot re-open based on the
            original path as that file may have replaced with another or
            unlinked.
            """
            with self._lock:
                self._handle.seek(offset)
                return self._handle.read(nbytes)
|
File diff suppressed because it is too large
Load diff
|
@ -1,221 +0,0 @@
|
|||
"""
|
||||
maxminddb.reader
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains the pure Python database reader and related classes.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
import mmap
|
||||
except ImportError:
|
||||
# pylint: disable=invalid-name
|
||||
mmap = None
|
||||
|
||||
import struct
|
||||
|
||||
from maxminddb.compat import byte_from_int, int_from_byte, ipaddress
|
||||
from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY
|
||||
from maxminddb.decoder import Decoder
|
||||
from maxminddb.errors import InvalidDatabaseError
|
||||
from maxminddb.file import FileBuffer
|
||||
|
||||
|
||||
class Reader(object):

    """
    Instances of this class provide a reader for the MaxMind DB format. IP
    addresses can be looked up using the ``get`` method.
    """

    # Bytes of padding between the search tree and the data section.
    _DATA_SECTION_SEPARATOR_SIZE = 16
    # Sentinel that marks the start of the metadata block near the end of
    # the database file.
    _METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com"

    # Cached node index where the IPv4 subtree starts inside an IPv6 tree;
    # class-level default, set per instance by _start_node.
    _ipv4_start = None

    def __init__(self, database, mode=MODE_AUTO):
        """Reader for the MaxMind DB file format

        Arguments:
        database -- A path to a valid MaxMind DB file such as a GeoIP2
                    database file.
        mode -- mode to open the database with. Valid mode are:
            * MODE_MMAP - read from memory map.
            * MODE_FILE - read database as standard file.
            * MODE_MEMORY - load database into memory.
            * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default.

        Raises:
        ValueError -- on an unsupported mode.
        InvalidDatabaseError -- when the metadata marker cannot be found.
        """
        if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP:
            # Memory-map the whole file; the mapping stays valid after the
            # file object itself is closed.
            with open(database, 'rb') as db_file:
                self._buffer = mmap.mmap(
                    db_file.fileno(), 0, access=mmap.ACCESS_READ)
                self._buffer_size = self._buffer.size()
        elif mode in (MODE_AUTO, MODE_FILE):
            # MODE_AUTO only reaches here when mmap is unavailable.
            self._buffer = FileBuffer(database)
            self._buffer_size = self._buffer.size()
        elif mode == MODE_MEMORY:
            with open(database, 'rb') as db_file:
                self._buffer = db_file.read()
                self._buffer_size = len(self._buffer)
        else:
            raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, '
                             ' MODE_FILE, and MODE_MEMORY are support by the pure Python '
                             'Reader'.format(mode))

        # Search only the tail of the file for the metadata marker; 128 KiB
        # is the window used here -- presumably matching the spec's bound on
        # metadata size (TODO confirm against the MaxMind DB spec).
        metadata_start = self._buffer.rfind(self._METADATA_START_MARKER,
                                            max(0, self._buffer_size
                                                - 128 * 1024))

        if metadata_start == -1:
            self.close()
            raise InvalidDatabaseError('Error opening database file ({0}). '
                                       'Is this a valid MaxMind DB file?'
                                       ''.format(database))

        # The metadata map immediately follows the marker.
        metadata_start += len(self._METADATA_START_MARKER)
        metadata_decoder = Decoder(self._buffer, metadata_start)
        (metadata, _) = metadata_decoder.decode(metadata_start)
        self._metadata = Metadata(
            **metadata)  # pylint: disable=bad-option-value

        # Decoder for the data section; record offsets are relative to the
        # end of the search tree plus the 16-byte separator.
        self._decoder = Decoder(self._buffer, self._metadata.search_tree_size
                                + self._DATA_SECTION_SEPARATOR_SIZE)

    def metadata(self):
        """Return the metadata associated with the MaxMind DB file"""
        return self._metadata

    def get(self, ip_address):
        """Return the record for the ip_address in the MaxMind DB


        Arguments:
        ip_address -- an IP address in the standard string notation

        Returns the decoded record, or None when the address is not in the
        database. Raises ValueError for an IPv6 lookup against an
        IPv4-only database.
        """
        address = ipaddress.ip_address(ip_address)

        if address.version == 6 and self._metadata.ip_version == 4:
            raise ValueError('Error looking up {0}. You attempted to look up '
                             'an IPv6 address in an IPv4-only database.'.format(
                                 ip_address))
        pointer = self._find_address_in_tree(address)

        # A pointer of 0 means "not found" (see _find_address_in_tree).
        return self._resolve_data_pointer(pointer) if pointer else None

    def _find_address_in_tree(self, ip_address):
        # Walk the binary search tree one address bit at a time, MSB first.
        packed = ip_address.packed

        bit_count = len(packed) * 8
        node = self._start_node(bit_count)

        for i in range(bit_count):
            if node >= self._metadata.node_count:
                break
            # Bit i of the packed address. Note precedence: the shift
            # amount is (7 - (i % 8)).
            bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8))
            node = self._read_node(node, bit)
        if node == self._metadata.node_count:
            # Record is empty
            return 0
        elif node > self._metadata.node_count:
            # Values above node_count are pointers into the data section.
            return node

        raise InvalidDatabaseError('Invalid node in search tree')

    def _start_node(self, length):
        # Full-length (IPv6) lookups, or an IPv4 tree, start at the root.
        if self._metadata.ip_version != 6 or length == 128:
            return 0

        # We are looking up an IPv4 address in an IPv6 tree. Skip over the
        # first 96 nodes.
        if self._ipv4_start:
            return self._ipv4_start

        node = 0
        for _ in range(96):
            if node >= self._metadata.node_count:
                break
            node = self._read_node(node, 0)
        # Cache so later IPv4 lookups skip this walk.
        self._ipv4_start = node
        return node

    def _read_node(self, node_number, index):
        # Return record `index` (0 = left, 1 = right) of the given node as
        # an unsigned int, handling the three record encodings.
        base_offset = node_number * self._metadata.node_byte_size

        record_size = self._metadata.record_size
        if record_size == 24:
            # 3 bytes per record; zero-pad to 4 for '!I'.
            offset = base_offset + index * 3
            node_bytes = b'\x00' + self._buffer[offset:offset + 3]
        elif record_size == 28:
            # The middle byte carries 4 extra bits of each record: high
            # nibble for the left record, low nibble for the right.
            (middle,) = struct.unpack(
                b'!B', self._buffer[base_offset + 3:base_offset + 4])
            if index:
                middle &= 0x0F
            else:
                middle = (0xF0 & middle) >> 4
            offset = base_offset + index * 4
            node_bytes = byte_from_int(
                middle) + self._buffer[offset:offset + 3]
        elif record_size == 32:
            offset = base_offset + index * 4
            node_bytes = self._buffer[offset:offset + 4]
        else:
            raise InvalidDatabaseError(
                'Unknown record size: {0}'.format(record_size))
        return struct.unpack(b'!I', node_bytes)[0]

    def _resolve_data_pointer(self, pointer):
        # Convert a tree pointer into an absolute offset in the buffer.
        resolved = pointer - self._metadata.node_count + \
            self._metadata.search_tree_size

        # NOTE(review): this uses '>'; an offset exactly equal to
        # _buffer_size would also be out of range -- confirm intent.
        if resolved > self._buffer_size:
            raise InvalidDatabaseError(
                "The MaxMind DB file's search tree is corrupt")

        (data, _) = self._decoder.decode(resolved)
        return data

    def close(self):
        """Closes the MaxMind DB file and returns the resources to the system"""
        # pylint: disable=unidiomatic-typecheck
        # MODE_MEMORY stores the raw bytes, which have nothing to close.
        if type(self._buffer) not in (str, bytes):
            self._buffer.close()
|
||||
|
||||
|
||||
class Metadata(object):

    """Metadata for the MaxMind DB reader"""

    # pylint: disable=too-many-instance-attributes
    def __init__(self, **kwargs):
        """Creates new Metadata object. kwargs are key/value pairs from spec"""
        # Copy each spec key onto an identically-named attribute. The order
        # of assignment is kept stable because __repr__ reflects the
        # insertion order of the instance __dict__. A missing key raises
        # KeyError, exactly as direct indexing would.
        for key in ('node_count', 'record_size', 'ip_version',
                    'database_type', 'languages',
                    'binary_format_major_version',
                    'binary_format_minor_version', 'build_epoch',
                    'description'):
            setattr(self, key, kwargs[key])

    @property
    def node_byte_size(self):
        """The size of a node in bytes"""
        return self.record_size // 4

    @property
    def search_tree_size(self):
        """The size of the search tree"""
        return self.node_byte_size * self.node_count

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '{module}.{class_name}({data})'.format(
            module=self.__module__,
            class_name=self.__class__.__name__,
            data=', '.join(pairs))
|
|
@ -1,60 +0,0 @@
|
|||
/**
 * @author alteredq / http://alteredqualia.com/
 * @author mr.doob / http://mrdoob.com/
 */

// Browser feature detection for canvas, WebGL, Web Workers and the File
// API, plus a helper that builds/attaches a "get WebGL" message box.
Detector = {

	canvas : !! window.CanvasRenderingContext2D,
	// Wrapped in try/catch: getContext can throw on some browsers.
	webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(),
	workers : !! window.Worker,
	fileapi : window.File && window.FileReader && window.FileList && window.Blob,

	// Build (but do not attach) a styled <div> describing the WebGL failure;
	// the message differs for "no GPU support" vs "no browser support".
	getWebGLErrorMessage : function () {

		var domElement = document.createElement( 'div' );

		domElement.style.fontFamily = 'monospace';
		domElement.style.fontSize = '13px';
		domElement.style.textAlign = 'center';
		domElement.style.background = '#eee';
		domElement.style.color = '#000';
		domElement.style.padding = '1em';
		domElement.style.width = '475px';
		domElement.style.margin = '5em auto 0';

		if ( ! this.webgl ) {

			domElement.innerHTML = window.WebGLRenderingContext ? [
				'Sorry, your graphics card doesn\'t support <a href="http://khronos.org/webgl/wiki/Getting_a_WebGL_Implementation">WebGL</a>'
			].join( '\n' ) : [
				'Sorry, your browser doesn\'t support <a href="http://khronos.org/webgl/wiki/Getting_a_WebGL_Implementation">WebGL</a><br/>',
				'Please try with',
				'<a href="http://www.google.com/chrome">Chrome</a>, ',
				'<a href="http://www.mozilla.com/en-US/firefox/new/">Firefox 4</a> or',
				'<a href="http://nightly.webkit.org/">Webkit Nightly (Mac)</a>'
			].join( '\n' );

		}

		return domElement;

	},

	// Build the error message and append it to parameters.parent (default:
	// document.body) with id parameters.id (default: 'oldie').
	addGetWebGLMessage : function ( parameters ) {

		var parent, id, domElement;

		parameters = parameters || {};

		parent = parameters.parent !== undefined ? parameters.parent : document.body;
		id = parameters.id !== undefined ? parameters.id : 'oldie';

		domElement = Detector.getWebGLErrorMessage();
		domElement.id = id;

		parent.appendChild( domElement );

	}

};
|
|
@ -1,12 +0,0 @@
|
|||
// Tween.js - http://github.com/sole/tween.js
// Vendored, minified third-party tweening library. Left byte-for-byte
// intact; comments below only label the minified sections.
// Core: global interval-driven tween registry (start/stop/add/remove/update).
var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a<e;)if(f[a].update(d))a++;else{f.splice(a,1);e--}}}}();
// TWEEN.Tween: per-object tween with to/start/stop/delay/easing/chain/callbacks.
TWEEN.Tween=function(a){var e={},c={},d={},f=1E3,g=0,j=null,n=TWEEN.Easing.Linear.EaseNone,k=null,l=null,m=null;this.to=function(b,h){if(h!==null)f=h;for(var i in b)if(a[i]!==null)d[i]=b[i];return this};this.start=function(){TWEEN.add(this);j=(new Date).getTime()+g;for(var b in d)if(a[b]!==null){e[b]=a[b];c[b]=d[b]-a[b]}return this};this.stop=function(){TWEEN.remove(this);return this};this.delay=function(b){g=b;return this};this.easing=function(b){n=b;return this};this.chain=function(b){k=b};this.onUpdate=
function(b){l=b;return this};this.onComplete=function(b){m=b;return this};this.update=function(b){var h,i;if(b<j)return true;b=(b-j)/f;b=b>1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a};
// Polynomial easing functions (quadratic through quintic).
TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a};
TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1};
// Trigonometric, exponential and circular easing functions.
TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)};
TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))};
// Elastic, back and bounce easing functions.
TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1};
TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1};
TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375};
TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5};
|
File diff suppressed because one or more lines are too long
|
@ -1,424 +0,0 @@
|
|||
/**
|
||||
* dat.globe Javascript WebGL Globe Toolkit
|
||||
* http://dataarts.github.com/dat.globe
|
||||
*
|
||||
* Copyright 2011 Data Arts Team, Google Creative Lab
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the 'License');
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*/
|
||||
|
||||
var DAT = DAT || {};
|
||||
|
||||
DAT.Globe = function(container, opts) {
|
||||
opts = opts || {};
|
||||
|
||||
var colorFn = opts.colorFn || function(x) {
|
||||
var c = new THREE.Color();
|
||||
c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 );
|
||||
return c;
|
||||
};
|
||||
var imgDir = opts.imgDir || '/globe/';
|
||||
|
||||
var Shaders = {
|
||||
'earth' : {
|
||||
uniforms: {
|
||||
'texture': { type: 't', value: null }
|
||||
},
|
||||
vertexShader: [
|
||||
'varying vec3 vNormal;',
|
||||
'varying vec2 vUv;',
|
||||
'void main() {',
|
||||
'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
|
||||
'vNormal = normalize( normalMatrix * normal );',
|
||||
'vUv = uv;',
|
||||
'}'
|
||||
].join('\n'),
|
||||
fragmentShader: [
|
||||
'uniform sampler2D texture;',
|
||||
'varying vec3 vNormal;',
|
||||
'varying vec2 vUv;',
|
||||
'void main() {',
|
||||
'vec3 diffuse = texture2D( texture, vUv ).xyz;',
|
||||
'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );',
|
||||
'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );',
|
||||
'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );',
|
||||
'}'
|
||||
].join('\n')
|
||||
},
|
||||
'atmosphere' : {
|
||||
uniforms: {},
|
||||
vertexShader: [
|
||||
'varying vec3 vNormal;',
|
||||
'void main() {',
|
||||
'vNormal = normalize( normalMatrix * normal );',
|
||||
'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
|
||||
'}'
|
||||
].join('\n'),
|
||||
fragmentShader: [
|
||||
'varying vec3 vNormal;',
|
||||
'void main() {',
|
||||
'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );',
|
||||
'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;',
|
||||
'}'
|
||||
].join('\n')
|
||||
}
|
||||
};
|
||||
|
||||
var camera, scene, renderer, w, h;
|
||||
var mesh, atmosphere, point, running;
|
||||
|
||||
var overRenderer;
|
||||
var running = true;
|
||||
|
||||
var curZoomSpeed = 0;
|
||||
var zoomSpeed = 50;
|
||||
|
||||
var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 };
|
||||
var rotation = { x: 0, y: 0 },
|
||||
target = { x: Math.PI*3/2, y: Math.PI / 6.0 },
|
||||
targetOnDown = { x: 0, y: 0 };
|
||||
|
||||
var distance = 100000, distanceTarget = 100000;
|
||||
var padding = 10;
|
||||
var PI_HALF = Math.PI / 2;
|
||||
|
||||
  function init() {
    // Build the scene: textured earth sphere, additive atmosphere shell,
    // the reusable template point mesh, the renderer, and all listeners.

    container.style.color = '#fff';
    container.style.font = '13px/20px Arial, sans-serif';

    var shader, uniforms, material;
    w = container.offsetWidth || window.innerWidth;
    h = container.offsetHeight || window.innerHeight;

    camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000);
    camera.position.z = distance;

    scene = new THREE.Scene();

    // Shared sphere geometry for both the earth and the atmosphere shell.
    var geometry = new THREE.SphereGeometry(200, 40, 30);

    shader = Shaders['earth'];
    uniforms = THREE.UniformsUtils.clone(shader.uniforms);

    uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg');

    material = new THREE.ShaderMaterial({

          uniforms: uniforms,
          vertexShader: shader.vertexShader,
          fragmentShader: shader.fragmentShader

        });

    mesh = new THREE.Mesh(geometry, material);
    mesh.rotation.y = Math.PI;
    scene.add(mesh);

    // Atmosphere: back-side, additively blended shell slightly larger
    // than the earth sphere.
    shader = Shaders['atmosphere'];
    uniforms = THREE.UniformsUtils.clone(shader.uniforms);

    material = new THREE.ShaderMaterial({

          uniforms: uniforms,
          vertexShader: shader.vertexShader,
          fragmentShader: shader.fragmentShader,
          side: THREE.BackSide,
          blending: THREE.AdditiveBlending,
          transparent: true

        });

    mesh = new THREE.Mesh(geometry, material);
    mesh.scale.set( 1.1, 1.1, 1.1 );
    scene.add(mesh);

    // Template box used by addPoint; shifted so scaling along z grows the
    // bar outward from the globe surface.
    geometry = new THREE.BoxGeometry(2.75, 2.75, 1);
    geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5));

    point = new THREE.Mesh(geometry);

    renderer = new THREE.WebGLRenderer({antialias: true});
    renderer.setSize(w, h);
    renderer.setClearColor( 0x212121, 1 );

    renderer.domElement.style.position = 'relative';

    container.appendChild(renderer.domElement);

    // Drag-to-rotate, wheel/keyboard zoom, and resize handling.
    container.addEventListener('mousedown', onMouseDown, false);

    container.addEventListener('mousewheel', onMouseWheel, false);

    document.addEventListener('keydown', onDocumentKeyDown, false);

    window.addEventListener('resize', onWindowResize, false);

    // Track hover state so wheel zoom only applies over the globe.
    container.addEventListener('mouseover', function() {
      overRenderer = true;
    }, false);

    container.addEventListener('mouseout', function() {
      overRenderer = false;
    }, false);
  }
|
||||
|
||||
  function addData(data, opts) {
    // Convert a flat array of [lat, lng, value(, legend)] records into
    // point geometry; optionally store it as a morph target for animation.
    var lat, lng, size, color, i, step, colorFnWrapper;

    opts.animated = opts.animated || false;
    this.is_animated = opts.animated;
    opts.format = opts.format || 'magnitude'; // other option is 'legend'
    if (opts.format === 'magnitude') {
      // Records are [lat, lng, magnitude]; magnitude drives the color.
      step = 3;
      colorFnWrapper = function(data, i) { return colorFn(data[i+2]); }
    } else if (opts.format === 'legend') {
      // Records are [lat, lng, magnitude, legend-value].
      step = 4;
      colorFnWrapper = function(data, i) { return colorFn(data[i+3]); }
    } else if (opts.format === 'peer') {
      // NOTE(review): 'step' is never assigned in this branch, so the
      // loops below increment i by undefined (NaN) and stop after a
      // single record -- likely should be step = 3; confirm.
      colorFnWrapper = function(data, i) { return colorFn(data[i+2]); }
    } else {
      throw('error: format not supported: '+opts.format);
    }

    if (opts.animated) {
      // First animated dataset: build the zero-height base geometry that
      // all morph targets deform.
      if (this._baseGeometry === undefined) {
        this._baseGeometry = new THREE.Geometry();
        for (i = 0; i < data.length; i += step) {
          lat = data[i];
          lng = data[i + 1];
          // size = data[i + 2];
          color = colorFnWrapper(data,i);
          size = 0;
          addPoint(lat, lng, size, color, this._baseGeometry);
        }
      }
      if(this._morphTargetId === undefined) {
        this._morphTargetId = 0;
      } else {
        this._morphTargetId += 1;
      }
      opts.name = opts.name || 'morphTarget'+this._morphTargetId;
    }
    // Build this dataset's geometry with real (scaled) bar heights.
    var subgeo = new THREE.Geometry();
    for (i = 0; i < data.length; i += step) {
      lat = data[i];
      lng = data[i + 1];
      color = colorFnWrapper(data,i);
      size = data[i + 2];
      size = size*200;
      addPoint(lat, lng, size, color, subgeo);
    }
    if (opts.animated) {
      this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices});
    } else {
      this._baseGeometry = subgeo;
    }

  };
|
||||
|
||||
  function createPoints() {
    // Build the final points mesh from the accumulated geometry and add it
    // to the scene. Animated globes are padded to at least 8 morph targets.
    if (this._baseGeometry !== undefined) {
      if (this.is_animated === false) {
        this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({
              color: 0xffffff,
              vertexColors: THREE.FaceColors,
              morphTargets: false
            }));
      } else {
        if (this._baseGeometry.morphTargets.length < 8) {
          console.log('t l',this._baseGeometry.morphTargets.length);
          var padding = 8-this._baseGeometry.morphTargets.length;
          console.log('padding', padding);
          // NOTE(review): '<=' pushes padding+1 targets, one more than the
          // computed deficit -- confirm whether the off-by-one is intended.
          for(var i=0; i<=padding; i++) {
            console.log('padding',i);
            this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices});
          }
        }
        this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({
              color: 0xffffff,
              vertexColors: THREE.FaceColors,
              morphTargets: true
            }));
      }
      scene.add(this.points);
    }
  }
|
||||
|
||||
  function addPoint(lat, lng, size, color, subgeo) {
    // Position the shared template box on the radius-200 sphere at
    // (lat, lng), color and scale it, then merge a copy into subgeo.

    var phi = (90 - lat) * Math.PI / 180;
    var theta = (180 - lng) * Math.PI / 180;

    point.position.x = 200 * Math.sin(phi) * Math.cos(theta);
    point.position.y = 200 * Math.cos(phi);
    point.position.z = 200 * Math.sin(phi) * Math.sin(theta);

    // Orient the bar so its z axis points away from the globe center.
    point.lookAt(mesh.position);

    point.scale.z = Math.max( size, 0.1 ); // avoid non-invertible matrix
    point.updateMatrix();

    for (var i = 0; i < point.geometry.faces.length; i++) {

      point.geometry.faces[i].color = color;

    }
    // NOTE(review): updateMatrix() was already called above; this second
    // call looks redundant (though harmless).
    if(point.matrixAutoUpdate){
      point.updateMatrix();
    }
    subgeo.merge(point.geometry, point.matrix);
  }
|
||||
|
||||
  function onMouseDown(event) {
    // Begin a drag: record the starting mouse/rotation state and listen
    // for movement until mouseup/mouseout.
    event.preventDefault();

    container.addEventListener('mousemove', onMouseMove, false);
    container.addEventListener('mouseup', onMouseUp, false);
    container.addEventListener('mouseout', onMouseOut, false);

    // x is negated so dragging right rotates the globe the expected way
    // (see the matching negation in onMouseMove).
    mouseOnDown.x = - event.clientX;
    mouseOnDown.y = event.clientY;

    targetOnDown.x = target.x;
    targetOnDown.y = target.y;

    container.style.cursor = 'move';
  }
|
||||
|
||||
  function onMouseMove(event) {
    // Update the target rotation from the drag delta; sensitivity scales
    // with zoom distance so dragging feels consistent at any zoom level.
    mouse.x = - event.clientX;
    mouse.y = event.clientY;

    var zoomDamp = distance/1000;

    target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp;
    target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp;

    // Clamp pitch to [-90°, 90°] so the globe cannot flip over the poles.
    target.y = target.y > PI_HALF ? PI_HALF : target.y;
    target.y = target.y < - PI_HALF ? - PI_HALF : target.y;
  }
|
||||
|
||||
function onMouseUp(event) {
|
||||
container.removeEventListener('mousemove', onMouseMove, false);
|
||||
container.removeEventListener('mouseup', onMouseUp, false);
|
||||
container.removeEventListener('mouseout', onMouseOut, false);
|
||||
container.style.cursor = 'auto';
|
||||
}
|
||||
|
||||
function onMouseOut(event) {
|
||||
container.removeEventListener('mousemove', onMouseMove, false);
|
||||
container.removeEventListener('mouseup', onMouseUp, false);
|
||||
container.removeEventListener('mouseout', onMouseOut, false);
|
||||
}
|
||||
|
||||
function onMouseWheel(event) {
|
||||
event.preventDefault();
|
||||
if (overRenderer) {
|
||||
zoom(event.wheelDeltaY * 0.3);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function onDocumentKeyDown(event) {
|
||||
switch (event.keyCode) {
|
||||
case 38:
|
||||
zoom(100);
|
||||
event.preventDefault();
|
||||
break;
|
||||
case 40:
|
||||
zoom(-100);
|
||||
event.preventDefault();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
function onWindowResize( event ) {
|
||||
camera.aspect = container.offsetWidth / container.offsetHeight;
|
||||
camera.updateProjectionMatrix();
|
||||
renderer.setSize( container.offsetWidth, container.offsetHeight );
|
||||
}
|
||||
|
||||
function zoom(delta) {
|
||||
distanceTarget -= delta;
|
||||
distanceTarget = distanceTarget > 855 ? 855 : distanceTarget;
|
||||
distanceTarget = distanceTarget < 350 ? 350 : distanceTarget;
|
||||
}
|
||||
|
||||
function animate() {
|
||||
if (!running) return
|
||||
requestAnimationFrame(animate);
|
||||
render();
|
||||
}
|
||||
|
||||
function render() {
|
||||
zoom(curZoomSpeed);
|
||||
|
||||
rotation.x += (target.x - rotation.x) * 0.1;
|
||||
rotation.y += (target.y - rotation.y) * 0.1;
|
||||
distance += (distanceTarget - distance) * 0.3;
|
||||
|
||||
camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y);
|
||||
camera.position.y = distance * Math.sin(rotation.y);
|
||||
camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y);
|
||||
|
||||
camera.lookAt(mesh.position);
|
||||
|
||||
renderer.render(scene, camera);
|
||||
}
|
||||
|
||||
function unload() {
|
||||
running = false
|
||||
container.removeEventListener('mousedown', onMouseDown, false);
|
||||
container.removeEventListener('mousewheel', onMouseWheel, false);
|
||||
document.removeEventListener('keydown', onDocumentKeyDown, false);
|
||||
window.removeEventListener('resize', onWindowResize, false);
|
||||
|
||||
}
|
||||
|
||||
init();
|
||||
this.animate = animate;
|
||||
this.unload = unload;
|
||||
|
||||
|
||||
this.__defineGetter__('time', function() {
|
||||
return this._time || 0;
|
||||
});
|
||||
|
||||
this.__defineSetter__('time', function(t) {
|
||||
var validMorphs = [];
|
||||
var morphDict = this.points.morphTargetDictionary;
|
||||
for(var k in morphDict) {
|
||||
if(k.indexOf('morphPadding') < 0) {
|
||||
validMorphs.push(morphDict[k]);
|
||||
}
|
||||
}
|
||||
validMorphs.sort();
|
||||
var l = validMorphs.length-1;
|
||||
var scaledt = t*l+1;
|
||||
var index = Math.floor(scaledt);
|
||||
for (i=0;i<validMorphs.length;i++) {
|
||||
this.points.morphTargetInfluences[validMorphs[i]] = 0;
|
||||
}
|
||||
var lastIndex = index - 1;
|
||||
var leftover = scaledt - index;
|
||||
if (lastIndex >= 0) {
|
||||
this.points.morphTargetInfluences[lastIndex] = 1 - leftover;
|
||||
}
|
||||
this.points.morphTargetInfluences[index] = leftover;
|
||||
this._time = t;
|
||||
});
|
||||
|
||||
this.addData = addData;
|
||||
this.createPoints = createPoints;
|
||||
this.renderer = renderer;
|
||||
this.scene = scene;
|
||||
|
||||
return this;
|
||||
|
||||
};
|
||||
|
814
plugins/Sidebar/media-globe/three.min.js
vendored
814
plugins/Sidebar/media-globe/three.min.js
vendored
File diff suppressed because one or more lines are too long
Binary file not shown.
Before Width: | Height: | Size: 93 KiB |
|
@ -1,23 +0,0 @@
|
|||
class Class
|
||||
trace: true
|
||||
|
||||
log: (args...) ->
|
||||
return unless @trace
|
||||
return if typeof console is 'undefined'
|
||||
args.unshift("[#{@.constructor.name}]")
|
||||
console.log(args...)
|
||||
@
|
||||
|
||||
logStart: (name, args...) ->
|
||||
return unless @trace
|
||||
@logtimers or= {}
|
||||
@logtimers[name] = +(new Date)
|
||||
@log "#{name}", args..., "(started)" if args.length > 0
|
||||
@
|
||||
|
||||
logEnd: (name, args...) ->
|
||||
ms = +(new Date)-@logtimers[name]
|
||||
@log "#{name}", args..., "(Done in #{ms}ms)"
|
||||
@
|
||||
|
||||
window.Class = Class
|
|
@ -1,91 +0,0 @@
|
|||
/* via http://jsfiddle.net/elGrecode/00dgurnn/ */
|
||||
|
||||
window.initScrollable = function () {
|
||||
|
||||
var scrollContainer = document.querySelector('.scrollable'),
|
||||
scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'),
|
||||
scrollContent = document.querySelector('.scrollable .content'),
|
||||
contentPosition = 0,
|
||||
scrollerBeingDragged = false,
|
||||
scroller,
|
||||
topPosition,
|
||||
scrollerHeight;
|
||||
|
||||
function calculateScrollerHeight() {
|
||||
// *Calculation of how tall scroller should be
|
||||
var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight;
|
||||
if (visibleRatio == 1)
|
||||
scroller.style.display = "none";
|
||||
else
|
||||
scroller.style.display = "block";
|
||||
return visibleRatio * scrollContainer.offsetHeight;
|
||||
}
|
||||
|
||||
function moveScroller(evt) {
|
||||
// Move Scroll bar to top offset
|
||||
var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight;
|
||||
topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box
|
||||
scroller.style.top = topPosition + 'px';
|
||||
}
|
||||
|
||||
function startDrag(evt) {
|
||||
normalizedPosition = evt.pageY;
|
||||
contentPosition = scrollContentWrapper.scrollTop;
|
||||
scrollerBeingDragged = true;
|
||||
window.addEventListener('mousemove', scrollBarScroll);
|
||||
return false;
|
||||
}
|
||||
|
||||
function stopDrag(evt) {
|
||||
scrollerBeingDragged = false;
|
||||
window.removeEventListener('mousemove', scrollBarScroll);
|
||||
}
|
||||
|
||||
function scrollBarScroll(evt) {
|
||||
if (scrollerBeingDragged === true) {
|
||||
evt.preventDefault();
|
||||
var mouseDifferential = evt.pageY - normalizedPosition;
|
||||
var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight);
|
||||
scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent;
|
||||
}
|
||||
}
|
||||
|
||||
function updateHeight() {
|
||||
scrollerHeight = calculateScrollerHeight() - 10;
|
||||
scroller.style.height = scrollerHeight + 'px';
|
||||
}
|
||||
|
||||
function createScroller() {
|
||||
// *Creates scroller element and appends to '.scrollable' div
|
||||
// create scroller element
|
||||
scroller = document.createElement("div");
|
||||
scroller.className = 'scroller';
|
||||
|
||||
// determine how big scroller should be based on content
|
||||
scrollerHeight = calculateScrollerHeight() - 10;
|
||||
|
||||
if (scrollerHeight / scrollContainer.offsetHeight < 1) {
|
||||
// *If there is a need to have scroll bar based on content size
|
||||
scroller.style.height = scrollerHeight + 'px';
|
||||
|
||||
// append scroller to scrollContainer div
|
||||
scrollContainer.appendChild(scroller);
|
||||
|
||||
// show scroll path divot
|
||||
scrollContainer.className += ' showScroll';
|
||||
|
||||
// attach related draggable listeners
|
||||
scroller.addEventListener('mousedown', startDrag);
|
||||
window.addEventListener('mouseup', stopDrag);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
createScroller();
|
||||
|
||||
|
||||
// *** Listeners ***
|
||||
scrollContentWrapper.addEventListener('scroll', moveScroller);
|
||||
|
||||
return updateHeight;
|
||||
};
|
|
@ -1,44 +0,0 @@
|
|||
.scrollable {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.scrollable.showScroll::after {
|
||||
position: absolute;
|
||||
content: '';
|
||||
top: 5%;
|
||||
right: 7px;
|
||||
height: 90%;
|
||||
width: 3px;
|
||||
background: rgba(224, 224, 255, .3);
|
||||
}
|
||||
|
||||
.scrollable .content-wrapper {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
padding-right: 50%;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
.scroller {
|
||||
margin-top: 5px;
|
||||
z-index: 5;
|
||||
cursor: pointer;
|
||||
position: absolute;
|
||||
width: 7px;
|
||||
border-radius: 5px;
|
||||
background: #151515;
|
||||
top: 0px;
|
||||
left: 395px;
|
||||
-webkit-transition: top .08s;
|
||||
-moz-transition: top .08s;
|
||||
-ms-transition: top .08s;
|
||||
-o-transition: top .08s;
|
||||
transition: top .08s;
|
||||
}
|
||||
.scroller {
|
||||
-webkit-touch-callout: none;
|
||||
-webkit-user-select: none;
|
||||
-khtml-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
|
@ -1,326 +0,0 @@
|
|||
class Sidebar extends Class
|
||||
constructor: ->
|
||||
@tag = null
|
||||
@container = null
|
||||
@opened = false
|
||||
@width = 410
|
||||
@fixbutton = $(".fixbutton")
|
||||
@fixbutton_addx = 0
|
||||
@fixbutton_initx = 0
|
||||
@fixbutton_targetx = 0
|
||||
@frame = $("#inner-iframe")
|
||||
@initFixbutton()
|
||||
@dragStarted = 0
|
||||
@globe = null
|
||||
|
||||
@original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original
|
||||
|
||||
# Start in opened state for debugging
|
||||
if false
|
||||
@startDrag()
|
||||
@moved()
|
||||
@fixbutton_targetx = @fixbutton_initx - @width
|
||||
@stopDrag()
|
||||
|
||||
|
||||
initFixbutton: ->
|
||||
# Detect dragging
|
||||
@fixbutton.on "mousedown", (e) =>
|
||||
e.preventDefault()
|
||||
|
||||
# Disable previous listeners
|
||||
@fixbutton.off "click"
|
||||
@fixbutton.off "mousemove"
|
||||
|
||||
# Make sure its not a click
|
||||
@dragStarted = (+ new Date)
|
||||
@fixbutton.one "mousemove", (e) =>
|
||||
@fixbutton_addx = @fixbutton.offset().left-e.pageX
|
||||
@startDrag()
|
||||
@fixbutton.parent().on "click", (e) =>
|
||||
@stopDrag()
|
||||
@fixbutton_initx = @fixbutton.offset().left # Initial x position
|
||||
|
||||
|
||||
# Start dragging the fixbutton
|
||||
startDrag: ->
|
||||
@log "startDrag"
|
||||
@fixbutton_targetx = @fixbutton_initx # Fallback x position
|
||||
|
||||
@fixbutton.addClass("dragging")
|
||||
|
||||
# Fullscreen drag bg to capture mouse events over iframe
|
||||
$("<div class='drag-bg'></div>").appendTo(document.body)
|
||||
|
||||
# IE position wrap fix
|
||||
if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0
|
||||
@fixbutton.css("pointer-events", "none")
|
||||
|
||||
# Don't go to homepage
|
||||
@fixbutton.one "click", (e) =>
|
||||
@stopDrag()
|
||||
@fixbutton.removeClass("dragging")
|
||||
if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5
|
||||
# If moved more than some pixel the button then don't go to homepage
|
||||
e.preventDefault()
|
||||
|
||||
# Animate drag
|
||||
@fixbutton.parents().on "mousemove", @animDrag
|
||||
@fixbutton.parents().on "mousemove" ,@waitMove
|
||||
|
||||
# Stop dragging listener
|
||||
@fixbutton.parents().on "mouseup", (e) =>
|
||||
e.preventDefault()
|
||||
@stopDrag()
|
||||
|
||||
|
||||
# Wait for moving the fixbutton
|
||||
waitMove: (e) =>
|
||||
if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100
|
||||
@moved()
|
||||
@fixbutton.parents().off "mousemove" ,@waitMove
|
||||
|
||||
moved: ->
|
||||
@log "Moved"
|
||||
@createHtmltag()
|
||||
$(document.body).css("perspective", "1000px").addClass("body-sidebar")
|
||||
$(window).off "resize"
|
||||
$(window).on "resize", =>
|
||||
$(document.body).css "height", $(window).height()
|
||||
@scrollable()
|
||||
$(window).trigger "resize"
|
||||
|
||||
# Override setsiteinfo to catch changes
|
||||
wrapper.setSiteInfo = (site_info) =>
|
||||
@setSiteInfo(site_info)
|
||||
@original_set_site_info.apply(wrapper, arguments)
|
||||
|
||||
setSiteInfo: (site_info) ->
|
||||
@updateHtmlTag()
|
||||
@displayGlobe()
|
||||
|
||||
|
||||
# Create the sidebar html tag
|
||||
createHtmltag: ->
|
||||
if not @container
|
||||
@container = $("""
|
||||
<div class="sidebar-container"><div class="sidebar scrollable"><div class="content-wrapper"><div class="content">
|
||||
</div></div></div></div>
|
||||
""")
|
||||
@container.appendTo(document.body)
|
||||
@tag = @container.find(".sidebar")
|
||||
@updateHtmlTag()
|
||||
@scrollable = window.initScrollable()
|
||||
|
||||
|
||||
updateHtmlTag: ->
|
||||
wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) =>
|
||||
if @tag.find(".content").children().length == 0 # First update
|
||||
@log "Creating content"
|
||||
morphdom(@tag.find(".content")[0], '<div class="content">'+res+'</div>')
|
||||
@scrollable()
|
||||
|
||||
else # Not first update, patch the html to keep unchanged dom elements
|
||||
@log "Patching content"
|
||||
morphdom @tag.find(".content")[0], '<div class="content">'+res+'</div>', {
|
||||
onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state
|
||||
if from_el.className == "globe"
|
||||
return false
|
||||
else
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
animDrag: (e) =>
|
||||
mousex = e.pageX
|
||||
|
||||
overdrag = @fixbutton_initx-@width-mousex
|
||||
if overdrag > 0 # Overdragged
|
||||
overdrag_percent = 1+overdrag/300
|
||||
mousex = (e.pageX + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent)
|
||||
targetx = @fixbutton_initx-mousex-@fixbutton_addx
|
||||
|
||||
@fixbutton.offset
|
||||
left: mousex+@fixbutton_addx
|
||||
|
||||
if @tag
|
||||
@tag.css("transform", "translateX(#{0-targetx}px)")
|
||||
|
||||
# Check if opened
|
||||
if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9)
|
||||
@fixbutton_targetx = @fixbutton_initx - @width # Make it opened
|
||||
else
|
||||
@fixbutton_targetx = @fixbutton_initx
|
||||
|
||||
|
||||
# Stop dragging the fixbutton
|
||||
stopDrag: ->
|
||||
@fixbutton.parents().off "mousemove"
|
||||
@fixbutton.off "mousemove"
|
||||
@fixbutton.css("pointer-events", "")
|
||||
$(".drag-bg").remove()
|
||||
if not @fixbutton.hasClass("dragging")
|
||||
return
|
||||
@fixbutton.removeClass("dragging")
|
||||
|
||||
# Move back to initial position
|
||||
if @fixbutton_targetx != @fixbutton.offset().left
|
||||
# Animate fixbutton
|
||||
@fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", =>
|
||||
# Switch back to auto align
|
||||
if @fixbutton_targetx == @fixbutton_initx # Closed
|
||||
@fixbutton.css("left", "auto")
|
||||
else # Opened
|
||||
@fixbutton.css("left", @fixbutton_targetx)
|
||||
|
||||
$(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status
|
||||
|
||||
# Animate sidebar and iframe
|
||||
if @fixbutton_targetx == @fixbutton_initx
|
||||
# Closed
|
||||
targetx = 0
|
||||
@opened = false
|
||||
else
|
||||
# Opened
|
||||
targetx = @width
|
||||
if not @opened
|
||||
@onOpened()
|
||||
@opened = true
|
||||
|
||||
# Revent sidebar transitions
|
||||
@tag.css("transition", "0.4s ease-out")
|
||||
@tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, =>
|
||||
@tag.css("transition", "")
|
||||
if not @opened
|
||||
@container.remove()
|
||||
@container = null
|
||||
@tag.remove()
|
||||
@tag = null
|
||||
|
||||
# Revert body transformations
|
||||
@log "stopdrag", "opened:", @opened
|
||||
if not @opened
|
||||
@onClosed()
|
||||
|
||||
|
||||
onOpened: ->
|
||||
@log "Opened"
|
||||
@scrollable()
|
||||
|
||||
# Re-calculate height when site admin opened or closed
|
||||
@tag.find("#checkbox-owned").off("click").on "click", =>
|
||||
setTimeout (=>
|
||||
@scrollable()
|
||||
), 300
|
||||
|
||||
# Site limit button
|
||||
@tag.find("#button-sitelimit").on "click", =>
|
||||
wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), =>
|
||||
wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000
|
||||
@updateHtmlTag()
|
||||
return false
|
||||
|
||||
# Owned checkbox
|
||||
@tag.find("#checkbox-owned").on "click", =>
|
||||
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
|
||||
|
||||
# Owned checkbox
|
||||
@tag.find("#checkbox-autodownloadoptional").on "click", =>
|
||||
wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")]
|
||||
|
||||
# Change identity button
|
||||
@tag.find("#button-identity").on "click", =>
|
||||
wrapper.ws.cmd "certSelect"
|
||||
return false
|
||||
|
||||
# Owned checkbox
|
||||
@tag.find("#checkbox-owned").on "click", =>
|
||||
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
|
||||
|
||||
# Save settings
|
||||
@tag.find("#button-settings").on "click", =>
|
||||
wrapper.ws.cmd "fileGet", "content.json", (res) =>
|
||||
data = JSON.parse(res)
|
||||
data["title"] = $("#settings-title").val()
|
||||
data["description"] = $("#settings-description").val()
|
||||
json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t')))
|
||||
wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw)], (res) =>
|
||||
if res != "ok" # fileWrite failed
|
||||
wrapper.notifications.add "file-write", "error", "File write error: #{res}"
|
||||
else
|
||||
wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000
|
||||
@updateHtmlTag()
|
||||
return false
|
||||
|
||||
# Sign content.json
|
||||
@tag.find("#button-sign").on "click", =>
|
||||
inner_path = @tag.find("#select-contents").val()
|
||||
|
||||
if wrapper.site_info.privatekey
|
||||
# Privatekey stored in users.json
|
||||
wrapper.ws.cmd "siteSign", ["stored", inner_path], (res) =>
|
||||
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
|
||||
|
||||
else
|
||||
# Ask the user for privatekey
|
||||
wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key
|
||||
wrapper.ws.cmd "siteSign", [privatekey, inner_path], (res) =>
|
||||
if res == "ok"
|
||||
wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000
|
||||
|
||||
return false
|
||||
|
||||
# Publish content.json
|
||||
@tag.find("#button-publish").on "click", =>
|
||||
inner_path = @tag.find("#select-contents").val()
|
||||
@tag.find("#button-publish").addClass "loading"
|
||||
wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, =>
|
||||
@tag.find("#button-publish").removeClass "loading"
|
||||
|
||||
@loadGlobe()
|
||||
|
||||
|
||||
onClosed: ->
|
||||
$(window).off "resize"
|
||||
$(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) =>
|
||||
if e.target == document.body
|
||||
$(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd
|
||||
@unloadGlobe()
|
||||
|
||||
# We dont need site info anymore
|
||||
wrapper.setSiteInfo = @original_set_site_info
|
||||
|
||||
|
||||
loadGlobe: =>
|
||||
if @tag.find(".globe").hasClass("loading")
|
||||
setTimeout (=>
|
||||
if typeof(DAT) == "undefined" # Globe script not loaded, do it first
|
||||
$.getScript("/uimedia/globe/all.js", @displayGlobe)
|
||||
else
|
||||
@displayGlobe()
|
||||
), 600
|
||||
|
||||
|
||||
displayGlobe: =>
|
||||
wrapper.ws.cmd "sidebarGetPeers", [], (globe_data) =>
|
||||
if @globe
|
||||
@globe.scene.remove(@globe.points)
|
||||
@globe.addData( globe_data, {format: 'magnitude', name: "hello", animated: false} )
|
||||
@globe.createPoints()
|
||||
else
|
||||
@globe = new DAT.Globe( @tag.find(".globe")[0], {"imgDir": "/uimedia/globe/"} )
|
||||
@globe.addData( globe_data, {format: 'magnitude', name: "hello"} )
|
||||
@globe.createPoints()
|
||||
@globe.animate()
|
||||
@tag.find(".globe").removeClass("loading")
|
||||
|
||||
|
||||
unloadGlobe: =>
|
||||
if not @globe
|
||||
return false
|
||||
@globe.unload()
|
||||
@globe = null
|
||||
|
||||
|
||||
window.sidebar = new Sidebar()
|
||||
window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend'
|
|
@ -1,99 +0,0 @@
|
|||
.drag-bg { width: 100%; height: 100%; position: absolute; }
|
||||
.fixbutton.dragging { cursor: -webkit-grabbing; }
|
||||
.fixbutton-bg:active { cursor: -webkit-grabbing; }
|
||||
|
||||
|
||||
.body-sidebar { background-color: #666 !important; }
|
||||
#inner-iframe { transition: 0.3s ease-in-out; transform-origin: left; backface-visibility: hidden; outline: 1px solid transparent }
|
||||
.body-sidebar iframe { transform: rotateY(5deg); opacity: 0.8; pointer-events: none } /* translateX(-200px) scale(0.95)*/
|
||||
|
||||
/* SIDEBAR */
|
||||
|
||||
.sidebar-container { width: 100%; height: 100%; overflow: hidden; position: absolute; }
|
||||
.sidebar { background-color: #212121; position: absolute; right: -1200px; height: 100%; width: 1200px; } /*box-shadow: inset 0px 0px 10px #000*/
|
||||
.sidebar .content { margin: 30px; font-family: "Segoe UI Light", "Segoe UI", "Helvetica Neue"; color: white; width: 375px; height: 300px; font-weight: 200 }
|
||||
.sidebar h1, .sidebar h2 { font-weight: lighter; }
|
||||
.sidebar .button { margin: 0px; display: inline-block; }
|
||||
|
||||
|
||||
/* FIELDS */
|
||||
|
||||
.sidebar .fields { padding: 0px; list-style-type: none; width: 355px; }
|
||||
.sidebar .fields > li, .sidebar .fields .settings-owned > li { margin-bottom: 30px }
|
||||
.sidebar .fields > li:after, .sidebar .fields .settings-owned > li:after { clear: both; content: ''; display: block }
|
||||
.sidebar .fields label {
|
||||
font-family: Consolas, monospace; text-transform: uppercase; font-size: 13px; color: #ACACAC; display: inline-block; margin-bottom: 10px;
|
||||
vertical-align: text-bottom; margin-right: 10px;
|
||||
}
|
||||
.sidebar .fields label small { font-weight: normal; color: white; text-transform: none; }
|
||||
.sidebar .fields .text { background-color: black; border: 0px; padding: 10px; color: white; border-radius: 3px; width: 250px; font-family: Consolas, monospace; }
|
||||
.sidebar .fields .text.long { width: 330px; font-size: 72%; }
|
||||
.sidebar .fields .disabled { color: #AAA; background-color: #3B3B3B; }
|
||||
.sidebar .fields .text-num { width: 30px; text-align: right; padding-right: 30px; }
|
||||
.sidebar .fields .text-post { color: white; font-family: Consolas, monospace; display: inline-block; font-size: 13px; margin-left: -25px; width: 25px; }
|
||||
|
||||
/* Select */
|
||||
.sidebar .fields select {
|
||||
width: 225px; background-color: #3B3B3B; color: white; font-family: Consolas, monospace; appearance: none;
|
||||
padding: 5px; padding-right: 25px; border: 0px; border-radius: 3px; height: 35px; vertical-align: 1px; box-shadow: 0px 1px 2px rgba(0,0,0,0.5);
|
||||
}
|
||||
.sidebar .fields .select-down { margin-left: -39px; width: 34px; display: inline-block; transform: rotateZ(90deg); height: 35px; vertical-align: -8px; pointer-events: none; font-weight: bold }
|
||||
|
||||
/* Checkbox */
|
||||
.sidebar .fields .checkbox { width: 50px; height: 24px; position: relative; z-index: 999; opacity: 0; }
|
||||
.sidebar .fields .checkbox-skin { background-color: #CCC; width: 50px; height: 24px; border-radius: 15px; transition: all 0.3s ease-in-out; display: inline-block; margin-left: -59px; }
|
||||
.sidebar .fields .checkbox-skin:before {
|
||||
content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; border-radius: 100%; margin-top: 2px; margin-left: 2px;
|
||||
transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86);
|
||||
}
|
||||
.sidebar .fields .checkbox:checked ~ .checkbox-skin:before { margin-left: 27px; }
|
||||
.sidebar .fields .checkbox:checked ~ .checkbox-skin { background-color: #2ECC71; }
|
||||
|
||||
/* Fake input */
|
||||
.sidebar .input { font-size: 13px; width: 250px; display: inline-block; overflow: hidden; text-overflow: ellipsis; vertical-align: top }
|
||||
|
||||
/* GRAPH */
|
||||
|
||||
.graph { padding: 0px; list-style-type: none; width: 351px; background-color: black; height: 10px; border-radius: 8px; overflow: hidden; position: relative;}
|
||||
.graph li { height: 100%; position: absolute; transition: all 0.3s; }
|
||||
.graph-stacked li { position: static; float: left; }
|
||||
|
||||
.graph-legend { padding: 0px; list-style-type: none; margin-top: 13px; font-family: Consolas, "Andale Mono", monospace; font-size: 13px; text-transform: capitalize; }
|
||||
.sidebar .graph-legend li { margin: 0px; margin-top: 5px; margin-left: 0px; width: 160px; float: left; position: relative; }
|
||||
.sidebar .graph-legend li:nth-child(odd) { margin-right: 29px }
|
||||
.graph-legend span { position: absolute; }
|
||||
.graph-legend b { text-align: right; display: inline-block; width: 50px; float: right; font-weight: normal; }
|
||||
.graph-legend li:before { content: '\2022'; font-size: 23px; line-height: 0px; vertical-align: -3px; margin-right: 5px; }
|
||||
|
||||
/* COLORS */
|
||||
|
||||
.back-green { background-color: #2ECC71 }
|
||||
.color-green:before { color: #2ECC71 }
|
||||
.back-blue { background-color: #3BAFDA }
|
||||
.color-blue:before { color: #3BAFDA }
|
||||
.back-darkblue { background-color: #2196F3 }
|
||||
.color-darkblue:before { color: #2196F3 }
|
||||
.back-purple { background-color: #B10DC9 }
|
||||
.color-purple:before { color: #B10DC9 }
|
||||
.back-yellow { background-color: #FFDC00 }
|
||||
.color-yellow:before { color: #FFDC00 }
|
||||
.back-orange { background-color: #FF9800 }
|
||||
.color-orange:before { color: #FF9800 }
|
||||
.back-gray { background-color: #ECF0F1 }
|
||||
.color-gray:before { color: #ECF0F1 }
|
||||
.back-black { background-color: #34495E }
|
||||
.color-black:before { color: #34495E }
|
||||
.back-white { background-color: #EEE }
|
||||
.color-white:before { color: #EEE }
|
||||
|
||||
|
||||
/* Settings owned */
|
||||
|
||||
.owned-title { float: left }
|
||||
#checkbox-owned { margin-bottom: 25px; margin-top: 26px; margin-left: 11px; }
|
||||
#checkbox-owned ~ .settings-owned { opacity: 0; max-height: 0px; transition: all 0.3s linear; overflow: hidden }
|
||||
#checkbox-owned:checked ~ .settings-owned { opacity: 1; max-height: 400px }
|
||||
|
||||
/* Globe */
|
||||
.globe { width: 360px; height: 360px }
|
||||
.globe.loading { background: url(/uimedia/img/loading-circle.gif) center center no-repeat }
|
|
@ -1,153 +0,0 @@
|
|||
|
||||
|
||||
/* ---- plugins/Sidebar/media/Scrollbable.css ---- */
|
||||
|
||||
|
||||
.scrollable {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.scrollable.showScroll::after {
|
||||
position: absolute;
|
||||
content: '';
|
||||
top: 5%;
|
||||
right: 7px;
|
||||
height: 90%;
|
||||
width: 3px;
|
||||
background: rgba(224, 224, 255, .3);
|
||||
}
|
||||
|
||||
.scrollable .content-wrapper {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
padding-right: 50%;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
.scroller {
|
||||
margin-top: 5px;
|
||||
z-index: 5;
|
||||
cursor: pointer;
|
||||
position: absolute;
|
||||
width: 7px;
|
||||
-webkit-border-radius: 5px; -moz-border-radius: 5px; -o-border-radius: 5px; -ms-border-radius: 5px; border-radius: 5px ;
|
||||
background: #151515;
|
||||
top: 0px;
|
||||
left: 395px;
|
||||
-webkit-transition: top .08s;
|
||||
-moz-transition: top .08s;
|
||||
-ms-transition: top .08s;
|
||||
-o-transition: top .08s;
|
||||
-webkit-transition: top .08s; -moz-transition: top .08s; -o-transition: top .08s; -ms-transition: top .08s; transition: top .08s ;
|
||||
}
|
||||
.scroller {
|
||||
-webkit-touch-callout: none;
|
||||
-webkit-user-select: none;
|
||||
-khtml-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
|
||||
/* ---- plugins/Sidebar/media/Sidebar.css ---- */
|
||||
|
||||
|
||||
.drag-bg { width: 100%; height: 100%; position: absolute; }
|
||||
.fixbutton.dragging { cursor: -webkit-grabbing; }
|
||||
.fixbutton-bg:active { cursor: -webkit-grabbing; }
|
||||
|
||||
|
||||
.body-sidebar { background-color: #666 !important; }
|
||||
#inner-iframe { -webkit-transition: 0.3s ease-in-out; -moz-transition: 0.3s ease-in-out; -o-transition: 0.3s ease-in-out; -ms-transition: 0.3s ease-in-out; transition: 0.3s ease-in-out ; transform-origin: left; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; outline: 1px solid transparent }
|
||||
.body-sidebar iframe { -webkit-transform: rotateY(5deg); -moz-transform: rotateY(5deg); -o-transform: rotateY(5deg); -ms-transform: rotateY(5deg); transform: rotateY(5deg) ; opacity: 0.8; pointer-events: none } /* translateX(-200px) scale(0.95)*/
|
||||
|
||||
/* SIDEBAR */
|
||||
|
||||
.sidebar-container { width: 100%; height: 100%; overflow: hidden; position: absolute; }
|
||||
.sidebar { background-color: #212121; position: absolute; right: -1200px; height: 100%; width: 1200px; } /*box-shadow: inset 0px 0px 10px #000*/
|
||||
.sidebar .content { margin: 30px; font-family: "Segoe UI Light", "Segoe UI", "Helvetica Neue"; color: white; width: 375px; height: 300px; font-weight: 200 }
|
||||
.sidebar h1, .sidebar h2 { font-weight: lighter; }
|
||||
.sidebar .button { margin: 0px; display: inline-block; }
|
||||
|
||||
|
||||
/* FIELDS */
|
||||
|
||||
.sidebar .fields { padding: 0px; list-style-type: none; width: 355px; }
|
||||
.sidebar .fields > li, .sidebar .fields .settings-owned > li { margin-bottom: 30px }
|
||||
.sidebar .fields > li:after, .sidebar .fields .settings-owned > li:after { clear: both; content: ''; display: block }
|
||||
.sidebar .fields label {
|
||||
font-family: Consolas, monospace; text-transform: uppercase; font-size: 13px; color: #ACACAC; display: inline-block; margin-bottom: 10px;
|
||||
vertical-align: text-bottom; margin-right: 10px;
|
||||
}
|
||||
.sidebar .fields label small { font-weight: normal; color: white; text-transform: none; }
|
||||
.sidebar .fields .text { background-color: black; border: 0px; padding: 10px; color: white; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; width: 250px; font-family: Consolas, monospace; }
|
||||
.sidebar .fields .text.long { width: 330px; font-size: 72%; }
|
||||
.sidebar .fields .disabled { color: #AAA; background-color: #3B3B3B; }
|
||||
.sidebar .fields .text-num { width: 30px; text-align: right; padding-right: 30px; }
|
||||
.sidebar .fields .text-post { color: white; font-family: Consolas, monospace; display: inline-block; font-size: 13px; margin-left: -25px; width: 25px; }
|
||||
|
||||
/* Select */
|
||||
.sidebar .fields select {
|
||||
width: 225px; background-color: #3B3B3B; color: white; font-family: Consolas, monospace; -webkit-appearance: none; -moz-appearance: none; -o-appearance: none; -ms-appearance: none; appearance: none ;
|
||||
padding: 5px; padding-right: 25px; border: 0px; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; height: 35px; vertical-align: 1px; -webkit-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -moz-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -o-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -ms-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); box-shadow: 0px 1px 2px rgba(0,0,0,0.5) ;
|
||||
}
|
||||
.sidebar .fields .select-down { margin-left: -39px; width: 34px; display: inline-block; -webkit-transform: rotateZ(90deg); -moz-transform: rotateZ(90deg); -o-transform: rotateZ(90deg); -ms-transform: rotateZ(90deg); transform: rotateZ(90deg) ; height: 35px; vertical-align: -8px; pointer-events: none; font-weight: bold }
|
||||
|
||||
/* Checkbox */
|
||||
.sidebar .fields .checkbox { width: 50px; height: 24px; position: relative; z-index: 999; opacity: 0; }
|
||||
.sidebar .fields .checkbox-skin { background-color: #CCC; width: 50px; height: 24px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -o-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px ; -webkit-transition: all 0.3s ease-in-out; -moz-transition: all 0.3s ease-in-out; -o-transition: all 0.3s ease-in-out; -ms-transition: all 0.3s ease-in-out; transition: all 0.3s ease-in-out ; display: inline-block; margin-left: -59px; }
|
||||
.sidebar .fields .checkbox-skin:before {
|
||||
content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; -webkit-border-radius: 100%; -moz-border-radius: 100%; -o-border-radius: 100%; -ms-border-radius: 100%; border-radius: 100% ; margin-top: 2px; margin-left: 2px;
|
||||
-webkit-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -moz-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -o-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -ms-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86) ;
|
||||
}
|
||||
.sidebar .fields .checkbox:checked ~ .checkbox-skin:before { margin-left: 27px; }
|
||||
.sidebar .fields .checkbox:checked ~ .checkbox-skin { background-color: #2ECC71; }
|
||||
|
||||
/* Fake input */
|
||||
.sidebar .input { font-size: 13px; width: 250px; display: inline-block; overflow: hidden; text-overflow: ellipsis; vertical-align: top }
|
||||
|
||||
/* GRAPH */
|
||||
|
||||
.graph { padding: 0px; list-style-type: none; width: 351px; background-color: black; height: 10px; -webkit-border-radius: 8px; -moz-border-radius: 8px; -o-border-radius: 8px; -ms-border-radius: 8px; border-radius: 8px ; overflow: hidden; position: relative;}
|
||||
.graph li { height: 100%; position: absolute; -webkit-transition: all 0.3s; -moz-transition: all 0.3s; -o-transition: all 0.3s; -ms-transition: all 0.3s; transition: all 0.3s ; }
|
||||
.graph-stacked li { position: static; float: left; }
|
||||
|
||||
.graph-legend { padding: 0px; list-style-type: none; margin-top: 13px; font-family: Consolas, "Andale Mono", monospace; font-size: 13px; text-transform: capitalize; }
|
||||
.sidebar .graph-legend li { margin: 0px; margin-top: 5px; margin-left: 0px; width: 160px; float: left; position: relative; }
|
||||
.sidebar .graph-legend li:nth-child(odd) { margin-right: 29px }
|
||||
.graph-legend span { position: absolute; }
|
||||
.graph-legend b { text-align: right; display: inline-block; width: 50px; float: right; font-weight: normal; }
|
||||
.graph-legend li:before { content: '\2022'; font-size: 23px; line-height: 0px; vertical-align: -3px; margin-right: 5px; }
|
||||
|
||||
/* COLORS */
|
||||
|
||||
.back-green { background-color: #2ECC71 }
|
||||
.color-green:before { color: #2ECC71 }
|
||||
.back-blue { background-color: #3BAFDA }
|
||||
.color-blue:before { color: #3BAFDA }
|
||||
.back-darkblue { background-color: #2196F3 }
|
||||
.color-darkblue:before { color: #2196F3 }
|
||||
.back-purple { background-color: #B10DC9 }
|
||||
.color-purple:before { color: #B10DC9 }
|
||||
.back-yellow { background-color: #FFDC00 }
|
||||
.color-yellow:before { color: #FFDC00 }
|
||||
.back-orange { background-color: #FF9800 }
|
||||
.color-orange:before { color: #FF9800 }
|
||||
.back-gray { background-color: #ECF0F1 }
|
||||
.color-gray:before { color: #ECF0F1 }
|
||||
.back-black { background-color: #34495E }
|
||||
.color-black:before { color: #34495E }
|
||||
.back-white { background-color: #EEE }
|
||||
.color-white:before { color: #EEE }
|
||||
|
||||
|
||||
/* Settings owned */
|
||||
|
||||
.owned-title { float: left }
|
||||
#checkbox-owned { margin-bottom: 25px; margin-top: 26px; margin-left: 11px; }
|
||||
#checkbox-owned ~ .settings-owned { opacity: 0; max-height: 0px; -webkit-transition: all 0.3s linear; -moz-transition: all 0.3s linear; -o-transition: all 0.3s linear; -ms-transition: all 0.3s linear; transition: all 0.3s linear ; overflow: hidden }
|
||||
#checkbox-owned:checked ~ .settings-owned { opacity: 1; max-height: 400px }
|
||||
|
||||
/* Globe */
|
||||
.globe { width: 360px; height: 360px }
|
||||
.globe.loading { background: url(/uimedia/img/loading-circle.gif) center center no-repeat }
|
|
@ -1,893 +0,0 @@
|
|||
|
||||
|
||||
/* ---- plugins/Sidebar/media/Class.coffee ---- */
|
||||
|
||||
|
||||
(function() {
|
||||
var Class,
|
||||
__slice = [].slice;
|
||||
|
||||
Class = (function() {
|
||||
function Class() {}
|
||||
|
||||
Class.prototype.trace = true;
|
||||
|
||||
Class.prototype.log = function() {
|
||||
var args;
|
||||
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
|
||||
if (!this.trace) {
|
||||
return;
|
||||
}
|
||||
if (typeof console === 'undefined') {
|
||||
return;
|
||||
}
|
||||
args.unshift("[" + this.constructor.name + "]");
|
||||
console.log.apply(console, args);
|
||||
return this;
|
||||
};
|
||||
|
||||
Class.prototype.logStart = function() {
|
||||
var args, name;
|
||||
name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : [];
|
||||
if (!this.trace) {
|
||||
return;
|
||||
}
|
||||
this.logtimers || (this.logtimers = {});
|
||||
this.logtimers[name] = +(new Date);
|
||||
if (args.length > 0) {
|
||||
this.log.apply(this, ["" + name].concat(__slice.call(args), ["(started)"]));
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
Class.prototype.logEnd = function() {
|
||||
var args, ms, name;
|
||||
name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : [];
|
||||
ms = +(new Date) - this.logtimers[name];
|
||||
this.log.apply(this, ["" + name].concat(__slice.call(args), ["(Done in " + ms + "ms)"]));
|
||||
return this;
|
||||
};
|
||||
|
||||
return Class;
|
||||
|
||||
})();
|
||||
|
||||
window.Class = Class;
|
||||
|
||||
}).call(this);
|
||||
|
||||
|
||||
/* ---- plugins/Sidebar/media/Scrollable.js ---- */
|
||||
|
||||
|
||||
/* via http://jsfiddle.net/elGrecode/00dgurnn/ */
|
||||
|
||||
window.initScrollable = function () {
|
||||
|
||||
var scrollContainer = document.querySelector('.scrollable'),
|
||||
scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'),
|
||||
scrollContent = document.querySelector('.scrollable .content'),
|
||||
contentPosition = 0,
|
||||
scrollerBeingDragged = false,
|
||||
scroller,
|
||||
topPosition,
|
||||
scrollerHeight;
|
||||
|
||||
function calculateScrollerHeight() {
|
||||
// *Calculation of how tall scroller should be
|
||||
var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight;
|
||||
if (visibleRatio == 1)
|
||||
scroller.style.display = "none";
|
||||
else
|
||||
scroller.style.display = "block";
|
||||
return visibleRatio * scrollContainer.offsetHeight;
|
||||
}
|
||||
|
||||
function moveScroller(evt) {
|
||||
// Move Scroll bar to top offset
|
||||
var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight;
|
||||
topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box
|
||||
scroller.style.top = topPosition + 'px';
|
||||
}
|
||||
|
||||
function startDrag(evt) {
|
||||
normalizedPosition = evt.pageY;
|
||||
contentPosition = scrollContentWrapper.scrollTop;
|
||||
scrollerBeingDragged = true;
|
||||
window.addEventListener('mousemove', scrollBarScroll);
|
||||
return false;
|
||||
}
|
||||
|
||||
function stopDrag(evt) {
|
||||
scrollerBeingDragged = false;
|
||||
window.removeEventListener('mousemove', scrollBarScroll);
|
||||
}
|
||||
|
||||
function scrollBarScroll(evt) {
|
||||
if (scrollerBeingDragged === true) {
|
||||
evt.preventDefault();
|
||||
var mouseDifferential = evt.pageY - normalizedPosition;
|
||||
var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight);
|
||||
scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent;
|
||||
}
|
||||
}
|
||||
|
||||
function updateHeight() {
|
||||
scrollerHeight = calculateScrollerHeight() - 10;
|
||||
scroller.style.height = scrollerHeight + 'px';
|
||||
}
|
||||
|
||||
function createScroller() {
|
||||
// *Creates scroller element and appends to '.scrollable' div
|
||||
// create scroller element
|
||||
scroller = document.createElement("div");
|
||||
scroller.className = 'scroller';
|
||||
|
||||
// determine how big scroller should be based on content
|
||||
scrollerHeight = calculateScrollerHeight() - 10;
|
||||
|
||||
if (scrollerHeight / scrollContainer.offsetHeight < 1) {
|
||||
// *If there is a need to have scroll bar based on content size
|
||||
scroller.style.height = scrollerHeight + 'px';
|
||||
|
||||
// append scroller to scrollContainer div
|
||||
scrollContainer.appendChild(scroller);
|
||||
|
||||
// show scroll path divot
|
||||
scrollContainer.className += ' showScroll';
|
||||
|
||||
// attach related draggable listeners
|
||||
scroller.addEventListener('mousedown', startDrag);
|
||||
window.addEventListener('mouseup', stopDrag);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
createScroller();
|
||||
|
||||
|
||||
// *** Listeners ***
|
||||
scrollContentWrapper.addEventListener('scroll', moveScroller);
|
||||
|
||||
return updateHeight;
|
||||
};
|
||||
|
||||
|
||||
/* ---- plugins/Sidebar/media/Sidebar.coffee ---- */
|
||||
|
||||
|
||||
(function() {
|
||||
var Sidebar,
|
||||
__bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
|
||||
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
|
||||
__hasProp = {}.hasOwnProperty;
|
||||
|
||||
Sidebar = (function(_super) {
|
||||
__extends(Sidebar, _super);
|
||||
|
||||
function Sidebar() {
|
||||
this.unloadGlobe = __bind(this.unloadGlobe, this);
|
||||
this.displayGlobe = __bind(this.displayGlobe, this);
|
||||
this.loadGlobe = __bind(this.loadGlobe, this);
|
||||
this.animDrag = __bind(this.animDrag, this);
|
||||
this.waitMove = __bind(this.waitMove, this);
|
||||
this.tag = null;
|
||||
this.container = null;
|
||||
this.opened = false;
|
||||
this.width = 410;
|
||||
this.fixbutton = $(".fixbutton");
|
||||
this.fixbutton_addx = 0;
|
||||
this.fixbutton_initx = 0;
|
||||
this.fixbutton_targetx = 0;
|
||||
this.frame = $("#inner-iframe");
|
||||
this.initFixbutton();
|
||||
this.dragStarted = 0;
|
||||
this.globe = null;
|
||||
this.original_set_site_info = wrapper.setSiteInfo;
|
||||
if (false) {
|
||||
this.startDrag();
|
||||
this.moved();
|
||||
this.fixbutton_targetx = this.fixbutton_initx - this.width;
|
||||
this.stopDrag();
|
||||
}
|
||||
}
|
||||
|
||||
Sidebar.prototype.initFixbutton = function() {
|
||||
this.fixbutton.on("mousedown", (function(_this) {
|
||||
return function(e) {
|
||||
e.preventDefault();
|
||||
_this.fixbutton.off("click");
|
||||
_this.fixbutton.off("mousemove");
|
||||
_this.dragStarted = +(new Date);
|
||||
return _this.fixbutton.one("mousemove", function(e) {
|
||||
_this.fixbutton_addx = _this.fixbutton.offset().left - e.pageX;
|
||||
return _this.startDrag();
|
||||
});
|
||||
};
|
||||
})(this));
|
||||
this.fixbutton.parent().on("click", (function(_this) {
|
||||
return function(e) {
|
||||
return _this.stopDrag();
|
||||
};
|
||||
})(this));
|
||||
return this.fixbutton_initx = this.fixbutton.offset().left;
|
||||
};
|
||||
|
||||
Sidebar.prototype.startDrag = function() {
|
||||
this.log("startDrag");
|
||||
this.fixbutton_targetx = this.fixbutton_initx;
|
||||
this.fixbutton.addClass("dragging");
|
||||
$("<div class='drag-bg'></div>").appendTo(document.body);
|
||||
if (navigator.userAgent.indexOf('MSIE') !== -1 || navigator.appVersion.indexOf('Trident/') > 0) {
|
||||
this.fixbutton.css("pointer-events", "none");
|
||||
}
|
||||
this.fixbutton.one("click", (function(_this) {
|
||||
return function(e) {
|
||||
_this.stopDrag();
|
||||
_this.fixbutton.removeClass("dragging");
|
||||
if (Math.abs(_this.fixbutton.offset().left - _this.fixbutton_initx) > 5) {
|
||||
return e.preventDefault();
|
||||
}
|
||||
};
|
||||
})(this));
|
||||
this.fixbutton.parents().on("mousemove", this.animDrag);
|
||||
this.fixbutton.parents().on("mousemove", this.waitMove);
|
||||
return this.fixbutton.parents().on("mouseup", (function(_this) {
|
||||
return function(e) {
|
||||
e.preventDefault();
|
||||
return _this.stopDrag();
|
||||
};
|
||||
})(this));
|
||||
};
|
||||
|
||||
Sidebar.prototype.waitMove = function(e) {
|
||||
if (Math.abs(this.fixbutton.offset().left - this.fixbutton_targetx) > 10 && (+(new Date)) - this.dragStarted > 100) {
|
||||
this.moved();
|
||||
return this.fixbutton.parents().off("mousemove", this.waitMove);
|
||||
}
|
||||
};
|
||||
|
||||
Sidebar.prototype.moved = function() {
|
||||
this.log("Moved");
|
||||
this.createHtmltag();
|
||||
$(document.body).css("perspective", "1000px").addClass("body-sidebar");
|
||||
$(window).off("resize");
|
||||
$(window).on("resize", (function(_this) {
|
||||
return function() {
|
||||
$(document.body).css("height", $(window).height());
|
||||
return _this.scrollable();
|
||||
};
|
||||
})(this));
|
||||
$(window).trigger("resize");
|
||||
return wrapper.setSiteInfo = (function(_this) {
|
||||
return function(site_info) {
|
||||
_this.setSiteInfo(site_info);
|
||||
return _this.original_set_site_info.apply(wrapper, arguments);
|
||||
};
|
||||
})(this);
|
||||
};
|
||||
|
||||
Sidebar.prototype.setSiteInfo = function(site_info) {
|
||||
this.updateHtmlTag();
|
||||
return this.displayGlobe();
|
||||
};
|
||||
|
||||
Sidebar.prototype.createHtmltag = function() {
|
||||
if (!this.container) {
|
||||
this.container = $("<div class=\"sidebar-container\"><div class=\"sidebar scrollable\"><div class=\"content-wrapper\"><div class=\"content\">\n</div></div></div></div>");
|
||||
this.container.appendTo(document.body);
|
||||
this.tag = this.container.find(".sidebar");
|
||||
this.updateHtmlTag();
|
||||
return this.scrollable = window.initScrollable();
|
||||
}
|
||||
};
|
||||
|
||||
Sidebar.prototype.updateHtmlTag = function() {
|
||||
return wrapper.ws.cmd("sidebarGetHtmlTag", {}, (function(_this) {
|
||||
return function(res) {
|
||||
if (_this.tag.find(".content").children().length === 0) {
|
||||
_this.log("Creating content");
|
||||
morphdom(_this.tag.find(".content")[0], '<div class="content">' + res + '</div>');
|
||||
return _this.scrollable();
|
||||
} else {
|
||||
_this.log("Patching content");
|
||||
return morphdom(_this.tag.find(".content")[0], '<div class="content">' + res + '</div>', {
|
||||
onBeforeMorphEl: function(from_el, to_el) {
|
||||
if (from_el.className === "globe") {
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
})(this));
|
||||
};
|
||||
|
||||
Sidebar.prototype.animDrag = function(e) {
|
||||
var mousex, overdrag, overdrag_percent, targetx;
|
||||
mousex = e.pageX;
|
||||
overdrag = this.fixbutton_initx - this.width - mousex;
|
||||
if (overdrag > 0) {
|
||||
overdrag_percent = 1 + overdrag / 300;
|
||||
mousex = (e.pageX + (this.fixbutton_initx - this.width) * overdrag_percent) / (1 + overdrag_percent);
|
||||
}
|
||||
targetx = this.fixbutton_initx - mousex - this.fixbutton_addx;
|
||||
this.fixbutton.offset({
|
||||
left: mousex + this.fixbutton_addx
|
||||
});
|
||||
if (this.tag) {
|
||||
this.tag.css("transform", "translateX(" + (0 - targetx) + "px)");
|
||||
}
|
||||
if ((!this.opened && targetx > this.width / 3) || (this.opened && targetx > this.width * 0.9)) {
|
||||
return this.fixbutton_targetx = this.fixbutton_initx - this.width;
|
||||
} else {
|
||||
return this.fixbutton_targetx = this.fixbutton_initx;
|
||||
}
|
||||
};
|
||||
|
||||
Sidebar.prototype.stopDrag = function() {
|
||||
var targetx;
|
||||
this.fixbutton.parents().off("mousemove");
|
||||
this.fixbutton.off("mousemove");
|
||||
this.fixbutton.css("pointer-events", "");
|
||||
$(".drag-bg").remove();
|
||||
if (!this.fixbutton.hasClass("dragging")) {
|
||||
return;
|
||||
}
|
||||
this.fixbutton.removeClass("dragging");
|
||||
if (this.fixbutton_targetx !== this.fixbutton.offset().left) {
|
||||
this.fixbutton.stop().animate({
|
||||
"left": this.fixbutton_targetx
|
||||
}, 500, "easeOutBack", (function(_this) {
|
||||
return function() {
|
||||
if (_this.fixbutton_targetx === _this.fixbutton_initx) {
|
||||
_this.fixbutton.css("left", "auto");
|
||||
} else {
|
||||
_this.fixbutton.css("left", _this.fixbutton_targetx);
|
||||
}
|
||||
return $(".fixbutton-bg").trigger("mouseout");
|
||||
};
|
||||
})(this));
|
||||
if (this.fixbutton_targetx === this.fixbutton_initx) {
|
||||
targetx = 0;
|
||||
this.opened = false;
|
||||
} else {
|
||||
targetx = this.width;
|
||||
if (!this.opened) {
|
||||
this.onOpened();
|
||||
}
|
||||
this.opened = true;
|
||||
}
|
||||
this.tag.css("transition", "0.4s ease-out");
|
||||
this.tag.css("transform", "translateX(-" + targetx + "px)").one(transitionEnd, (function(_this) {
|
||||
return function() {
|
||||
_this.tag.css("transition", "");
|
||||
if (!_this.opened) {
|
||||
_this.container.remove();
|
||||
_this.container = null;
|
||||
_this.tag.remove();
|
||||
return _this.tag = null;
|
||||
}
|
||||
};
|
||||
})(this));
|
||||
this.log("stopdrag", "opened:", this.opened);
|
||||
if (!this.opened) {
|
||||
return this.onClosed();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Sidebar.prototype.onOpened = function() {
|
||||
this.log("Opened");
|
||||
this.scrollable();
|
||||
this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) {
|
||||
return function() {
|
||||
return setTimeout((function() {
|
||||
return _this.scrollable();
|
||||
}), 300);
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#button-sitelimit").on("click", (function(_this) {
|
||||
return function() {
|
||||
wrapper.ws.cmd("siteSetLimit", $("#input-sitelimit").val(), function() {
|
||||
wrapper.notifications.add("done-sitelimit", "done", "Site storage limit modified!", 5000);
|
||||
return _this.updateHtmlTag();
|
||||
});
|
||||
return false;
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#checkbox-owned").on("click", (function(_this) {
|
||||
return function() {
|
||||
return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]);
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#checkbox-autodownloadoptional").on("click", (function(_this) {
|
||||
return function() {
|
||||
return wrapper.ws.cmd("siteSetAutodownloadoptional", [_this.tag.find("#checkbox-autodownloadoptional").is(":checked")]);
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#button-identity").on("click", (function(_this) {
|
||||
return function() {
|
||||
wrapper.ws.cmd("certSelect");
|
||||
return false;
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#checkbox-owned").on("click", (function(_this) {
|
||||
return function() {
|
||||
return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]);
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#button-settings").on("click", (function(_this) {
|
||||
return function() {
|
||||
wrapper.ws.cmd("fileGet", "content.json", function(res) {
|
||||
var data, json_raw;
|
||||
data = JSON.parse(res);
|
||||
data["title"] = $("#settings-title").val();
|
||||
data["description"] = $("#settings-description").val();
|
||||
json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
|
||||
return wrapper.ws.cmd("fileWrite", ["content.json", btoa(json_raw)], function(res) {
|
||||
if (res !== "ok") {
|
||||
return wrapper.notifications.add("file-write", "error", "File write error: " + res);
|
||||
} else {
|
||||
wrapper.notifications.add("file-write", "done", "Site settings saved!", 5000);
|
||||
return _this.updateHtmlTag();
|
||||
}
|
||||
});
|
||||
});
|
||||
return false;
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#button-sign").on("click", (function(_this) {
|
||||
return function() {
|
||||
var inner_path;
|
||||
inner_path = _this.tag.find("#select-contents").val();
|
||||
if (wrapper.site_info.privatekey) {
|
||||
wrapper.ws.cmd("siteSign", ["stored", inner_path], function(res) {
|
||||
return wrapper.notifications.add("sign", "done", inner_path + " Signed!", 5000);
|
||||
});
|
||||
} else {
|
||||
wrapper.displayPrompt("Enter your private key:", "password", "Sign", function(privatekey) {
|
||||
return wrapper.ws.cmd("siteSign", [privatekey, inner_path], function(res) {
|
||||
if (res === "ok") {
|
||||
return wrapper.notifications.add("sign", "done", inner_path + " Signed!", 5000);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
return false;
|
||||
};
|
||||
})(this));
|
||||
this.tag.find("#button-publish").on("click", (function(_this) {
|
||||
return function() {
|
||||
var inner_path;
|
||||
inner_path = _this.tag.find("#select-contents").val();
|
||||
_this.tag.find("#button-publish").addClass("loading");
|
||||
return wrapper.ws.cmd("sitePublish", {
|
||||
"inner_path": inner_path,
|
||||
"sign": false
|
||||
}, function() {
|
||||
return _this.tag.find("#button-publish").removeClass("loading");
|
||||
});
|
||||
};
|
||||
})(this));
|
||||
return this.loadGlobe();
|
||||
};
|
||||
|
||||
Sidebar.prototype.onClosed = function() {
|
||||
$(window).off("resize");
|
||||
$(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on(transitionEnd, (function(_this) {
|
||||
return function(e) {
|
||||
if (e.target === document.body) {
|
||||
$(document.body).css("height", "auto").css("perspective", "").css("transition", "").off(transitionEnd);
|
||||
return _this.unloadGlobe();
|
||||
}
|
||||
};
|
||||
})(this));
|
||||
return wrapper.setSiteInfo = this.original_set_site_info;
|
||||
};
|
||||
|
||||
Sidebar.prototype.loadGlobe = function() {
|
||||
if (this.tag.find(".globe").hasClass("loading")) {
|
||||
return setTimeout(((function(_this) {
|
||||
return function() {
|
||||
if (typeof DAT === "undefined") {
|
||||
return $.getScript("/uimedia/globe/all.js", _this.displayGlobe);
|
||||
} else {
|
||||
return _this.displayGlobe();
|
||||
}
|
||||
};
|
||||
})(this)), 600);
|
||||
}
|
||||
};
|
||||
|
||||
Sidebar.prototype.displayGlobe = function() {
|
||||
return wrapper.ws.cmd("sidebarGetPeers", [], (function(_this) {
|
||||
return function(globe_data) {
|
||||
if (_this.globe) {
|
||||
_this.globe.scene.remove(_this.globe.points);
|
||||
_this.globe.addData(globe_data, {
|
||||
format: 'magnitude',
|
||||
name: "hello",
|
||||
animated: false
|
||||
});
|
||||
_this.globe.createPoints();
|
||||
} else {
|
||||
_this.globe = new DAT.Globe(_this.tag.find(".globe")[0], {
|
||||
"imgDir": "/uimedia/globe/"
|
||||
});
|
||||
_this.globe.addData(globe_data, {
|
||||
format: 'magnitude',
|
||||
name: "hello"
|
||||
});
|
||||
_this.globe.createPoints();
|
||||
_this.globe.animate();
|
||||
}
|
||||
return _this.tag.find(".globe").removeClass("loading");
|
||||
};
|
||||
})(this));
|
||||
};
|
||||
|
||||
Sidebar.prototype.unloadGlobe = function() {
|
||||
if (!this.globe) {
|
||||
return false;
|
||||
}
|
||||
this.globe.unload();
|
||||
return this.globe = null;
|
||||
};
|
||||
|
||||
return Sidebar;
|
||||
|
||||
})(Class);
|
||||
|
||||
window.sidebar = new Sidebar();
|
||||
|
||||
window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend';
|
||||
|
||||
}).call(this);
|
||||
|
||||
|
||||
/* ---- plugins/Sidebar/media/morphdom.js ---- */
|
||||
|
||||
|
||||
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.morphdom = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
|
||||
var specialElHandlers = {
|
||||
/**
|
||||
* Needed for IE. Apparently IE doesn't think
|
||||
* that "selected" is an attribute when reading
|
||||
* over the attributes using selectEl.attributes
|
||||
*/
|
||||
OPTION: function(fromEl, toEl) {
|
||||
if ((fromEl.selected = toEl.selected)) {
|
||||
fromEl.setAttribute('selected', '');
|
||||
} else {
|
||||
fromEl.removeAttribute('selected', '');
|
||||
}
|
||||
},
|
||||
/**
|
||||
* The "value" attribute is special for the <input> element
|
||||
* since it sets the initial value. Changing the "value"
|
||||
* attribute without changing the "value" property will have
|
||||
* no effect since it is only used to the set the initial value.
|
||||
* Similar for the "checked" attribute.
|
||||
*/
|
||||
/*INPUT: function(fromEl, toEl) {
|
||||
fromEl.checked = toEl.checked;
|
||||
fromEl.value = toEl.value;
|
||||
|
||||
if (!toEl.hasAttribute('checked')) {
|
||||
fromEl.removeAttribute('checked');
|
||||
}
|
||||
|
||||
if (!toEl.hasAttribute('value')) {
|
||||
fromEl.removeAttribute('value');
|
||||
}
|
||||
}*/
|
||||
};
|
||||
|
||||
function noop() {}
|
||||
|
||||
/**
|
||||
* Loop over all of the attributes on the target node and make sure the
|
||||
* original DOM node has the same attributes. If an attribute
|
||||
* found on the original node is not on the new node then remove it from
|
||||
* the original node
|
||||
* @param {HTMLElement} fromNode
|
||||
* @param {HTMLElement} toNode
|
||||
*/
|
||||
function morphAttrs(fromNode, toNode) {
|
||||
var attrs = toNode.attributes;
|
||||
var i;
|
||||
var attr;
|
||||
var attrName;
|
||||
var attrValue;
|
||||
var foundAttrs = {};
|
||||
|
||||
for (i=attrs.length-1; i>=0; i--) {
|
||||
attr = attrs[i];
|
||||
if (attr.specified !== false) {
|
||||
attrName = attr.name;
|
||||
attrValue = attr.value;
|
||||
foundAttrs[attrName] = true;
|
||||
|
||||
if (fromNode.getAttribute(attrName) !== attrValue) {
|
||||
fromNode.setAttribute(attrName, attrValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete any extra attributes found on the original DOM element that weren't
|
||||
// found on the target element.
|
||||
attrs = fromNode.attributes;
|
||||
|
||||
for (i=attrs.length-1; i>=0; i--) {
|
||||
attr = attrs[i];
|
||||
if (attr.specified !== false) {
|
||||
attrName = attr.name;
|
||||
if (!foundAttrs.hasOwnProperty(attrName)) {
|
||||
fromNode.removeAttribute(attrName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies the children of one DOM element to another DOM element
|
||||
*/
|
||||
function moveChildren(from, to) {
|
||||
var curChild = from.firstChild;
|
||||
while(curChild) {
|
||||
var nextChild = curChild.nextSibling;
|
||||
to.appendChild(curChild);
|
||||
curChild = nextChild;
|
||||
}
|
||||
return to;
|
||||
}
|
||||
|
||||
function morphdom(fromNode, toNode, options) {
|
||||
if (!options) {
|
||||
options = {};
|
||||
}
|
||||
|
||||
if (typeof toNode === 'string') {
|
||||
var newBodyEl = document.createElement('body');
|
||||
newBodyEl.innerHTML = toNode;
|
||||
toNode = newBodyEl.childNodes[0];
|
||||
}
|
||||
|
||||
var savedEls = {}; // Used to save off DOM elements with IDs
|
||||
var unmatchedEls = {};
|
||||
var onNodeDiscarded = options.onNodeDiscarded || noop;
|
||||
var onBeforeMorphEl = options.onBeforeMorphEl || noop;
|
||||
var onBeforeMorphElChildren = options.onBeforeMorphElChildren || noop;
|
||||
|
||||
function removeNodeHelper(node, nestedInSavedEl) {
|
||||
var id = node.id;
|
||||
// If the node has an ID then save it off since we will want
|
||||
// to reuse it in case the target DOM tree has a DOM element
|
||||
// with the same ID
|
||||
if (id) {
|
||||
savedEls[id] = node;
|
||||
} else if (!nestedInSavedEl) {
|
||||
// If we are not nested in a saved element then we know that this node has been
|
||||
// completely discarded and will not exist in the final DOM.
|
||||
onNodeDiscarded(node);
|
||||
}
|
||||
|
||||
if (node.nodeType === 1) {
|
||||
var curChild = node.firstChild;
|
||||
while(curChild) {
|
||||
removeNodeHelper(curChild, nestedInSavedEl || id);
|
||||
curChild = curChild.nextSibling;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function walkDiscardedChildNodes(node) {
|
||||
if (node.nodeType === 1) {
|
||||
var curChild = node.firstChild;
|
||||
while(curChild) {
|
||||
|
||||
|
||||
if (!curChild.id) {
|
||||
// We only want to handle nodes that don't have an ID to avoid double
|
||||
// walking the same saved element.
|
||||
|
||||
onNodeDiscarded(curChild);
|
||||
|
||||
// Walk recursively
|
||||
walkDiscardedChildNodes(curChild);
|
||||
}
|
||||
|
||||
curChild = curChild.nextSibling;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function removeNode(node, parentNode, alreadyVisited) {
|
||||
parentNode.removeChild(node);
|
||||
|
||||
if (alreadyVisited) {
|
||||
if (!node.id) {
|
||||
onNodeDiscarded(node);
|
||||
walkDiscardedChildNodes(node);
|
||||
}
|
||||
} else {
|
||||
removeNodeHelper(node);
|
||||
}
|
||||
}
|
||||
|
||||
function morphEl(fromNode, toNode, alreadyVisited) {
|
||||
if (toNode.id) {
|
||||
// If an element with an ID is being morphed then it is will be in the final
|
||||
// DOM so clear it out of the saved elements collection
|
||||
delete savedEls[toNode.id];
|
||||
}
|
||||
|
||||
if (onBeforeMorphEl(fromNode, toNode) === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
morphAttrs(fromNode, toNode);
|
||||
|
||||
if (onBeforeMorphElChildren(fromNode, toNode) === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
var curToNodeChild = toNode.firstChild;
|
||||
var curFromNodeChild = fromNode.firstChild;
|
||||
var curToNodeId;
|
||||
|
||||
var fromNextSibling;
|
||||
var toNextSibling;
|
||||
var savedEl;
|
||||
var unmatchedEl;
|
||||
|
||||
outer: while(curToNodeChild) {
|
||||
toNextSibling = curToNodeChild.nextSibling;
|
||||
curToNodeId = curToNodeChild.id;
|
||||
|
||||
while(curFromNodeChild) {
|
||||
var curFromNodeId = curFromNodeChild.id;
|
||||
fromNextSibling = curFromNodeChild.nextSibling;
|
||||
|
||||
if (!alreadyVisited) {
|
||||
if (curFromNodeId && (unmatchedEl = unmatchedEls[curFromNodeId])) {
|
||||
unmatchedEl.parentNode.replaceChild(curFromNodeChild, unmatchedEl);
|
||||
morphEl(curFromNodeChild, unmatchedEl, alreadyVisited);
|
||||
curFromNodeChild = fromNextSibling;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
var curFromNodeType = curFromNodeChild.nodeType;
|
||||
|
||||
if (curFromNodeType === curToNodeChild.nodeType) {
|
||||
var isCompatible = false;
|
||||
|
||||
if (curFromNodeType === 1) { // Both nodes being compared are Element nodes
|
||||
if (curFromNodeChild.tagName === curToNodeChild.tagName) {
|
||||
// We have compatible DOM elements
|
||||
if (curFromNodeId || curToNodeId) {
|
||||
// If either DOM element has an ID then we handle
|
||||
// those differently since we want to match up
|
||||
// by ID
|
||||
if (curToNodeId === curFromNodeId) {
|
||||
isCompatible = true;
|
||||
}
|
||||
} else {
|
||||
isCompatible = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (isCompatible) {
|
||||
// We found compatible DOM elements so add a
|
||||
// task to morph the compatible DOM elements
|
||||
morphEl(curFromNodeChild, curToNodeChild, alreadyVisited);
|
||||
}
|
||||
} else if (curFromNodeType === 3) { // Both nodes being compared are Text nodes
|
||||
isCompatible = true;
|
||||
curFromNodeChild.nodeValue = curToNodeChild.nodeValue;
|
||||
}
|
||||
|
||||
if (isCompatible) {
|
||||
curToNodeChild = toNextSibling;
|
||||
curFromNodeChild = fromNextSibling;
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
|
||||
// No compatible match so remove the old node from the DOM
|
||||
removeNode(curFromNodeChild, fromNode, alreadyVisited);
|
||||
|
||||
curFromNodeChild = fromNextSibling;
|
||||
}
|
||||
|
||||
if (curToNodeId) {
|
||||
if ((savedEl = savedEls[curToNodeId])) {
|
||||
morphEl(savedEl, curToNodeChild, true);
|
||||
curToNodeChild = savedEl; // We want to append the saved element instead
|
||||
} else {
|
||||
// The current DOM element in the target tree has an ID
|
||||
// but we did not find a match in any of the corresponding
|
||||
// siblings. We just put the target element in the old DOM tree
|
||||
// but if we later find an element in the old DOM tree that has
|
||||
// a matching ID then we will replace the target element
|
||||
// with the corresponding old element and morph the old element
|
||||
unmatchedEls[curToNodeId] = curToNodeChild;
|
||||
}
|
||||
}
|
||||
|
||||
// If we got this far then we did not find a candidate match for our "to node"
|
||||
// and we exhausted all of the children "from" nodes. Therefore, we will just
|
||||
// append the current "to node" to the end
|
||||
fromNode.appendChild(curToNodeChild);
|
||||
|
||||
curToNodeChild = toNextSibling;
|
||||
curFromNodeChild = fromNextSibling;
|
||||
}
|
||||
|
||||
// We have processed all of the "to nodes". If curFromNodeChild is non-null then
|
||||
// we still have some from nodes left over that need to be removed
|
||||
while(curFromNodeChild) {
|
||||
fromNextSibling = curFromNodeChild.nextSibling;
|
||||
removeNode(curFromNodeChild, fromNode, alreadyVisited);
|
||||
curFromNodeChild = fromNextSibling;
|
||||
}
|
||||
|
||||
var specialElHandler = specialElHandlers[fromNode.tagName];
|
||||
if (specialElHandler) {
|
||||
specialElHandler(fromNode, toNode);
|
||||
}
|
||||
}
|
||||
|
||||
var morphedNode = fromNode;
|
||||
var morphedNodeType = morphedNode.nodeType;
|
||||
var toNodeType = toNode.nodeType;
|
||||
|
||||
// Handle the case where we are given two DOM nodes that are not
|
||||
// compatible (e.g. <div> --> <span> or <div> --> TEXT)
|
||||
if (morphedNodeType === 1) {
|
||||
if (toNodeType === 1) {
|
||||
if (morphedNode.tagName !== toNode.tagName) {
|
||||
onNodeDiscarded(fromNode);
|
||||
morphedNode = moveChildren(morphedNode, document.createElement(toNode.tagName));
|
||||
}
|
||||
} else {
|
||||
// Going from an element node to a text node
|
||||
return toNode;
|
||||
}
|
||||
} else if (morphedNodeType === 3) { // Text node
|
||||
if (toNodeType === 3) {
|
||||
morphedNode.nodeValue = toNode.nodeValue;
|
||||
return morphedNode;
|
||||
} else {
|
||||
onNodeDiscarded(fromNode);
|
||||
// Text node to something else
|
||||
return toNode;
|
||||
}
|
||||
}
|
||||
|
||||
morphEl(morphedNode, toNode, false);
|
||||
|
||||
// Fire the "onNodeDiscarded" event for any saved elements
|
||||
// that never found a new home in the morphed DOM
|
||||
for (var savedElId in savedEls) {
|
||||
if (savedEls.hasOwnProperty(savedElId)) {
|
||||
var savedEl = savedEls[savedElId];
|
||||
onNodeDiscarded(savedEl);
|
||||
walkDiscardedChildNodes(savedEl);
|
||||
}
|
||||
}
|
||||
|
||||
if (morphedNode !== fromNode && fromNode.parentNode) {
|
||||
fromNode.parentNode.replaceChild(morphedNode, fromNode);
|
||||
}
|
||||
|
||||
return morphedNode;
|
||||
}
|
||||
|
||||
module.exports = morphdom;
|
||||
},{}]},{},[1])(1)
|
||||
});
|
|
@ -1,340 +0,0 @@
|
|||
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.morphdom = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
|
||||
var specialElHandlers = {
|
||||
/**
|
||||
* Needed for IE. Apparently IE doesn't think
|
||||
* that "selected" is an attribute when reading
|
||||
* over the attributes using selectEl.attributes
|
||||
*/
|
||||
OPTION: function(fromEl, toEl) {
|
||||
if ((fromEl.selected = toEl.selected)) {
|
||||
fromEl.setAttribute('selected', '');
|
||||
} else {
|
||||
fromEl.removeAttribute('selected', '');
|
||||
}
|
||||
},
|
||||
/**
|
||||
* The "value" attribute is special for the <input> element
|
||||
* since it sets the initial value. Changing the "value"
|
||||
* attribute without changing the "value" property will have
|
||||
* no effect since it is only used to the set the initial value.
|
||||
* Similar for the "checked" attribute.
|
||||
*/
|
||||
/*INPUT: function(fromEl, toEl) {
|
||||
fromEl.checked = toEl.checked;
|
||||
fromEl.value = toEl.value;
|
||||
|
||||
if (!toEl.hasAttribute('checked')) {
|
||||
fromEl.removeAttribute('checked');
|
||||
}
|
||||
|
||||
if (!toEl.hasAttribute('value')) {
|
||||
fromEl.removeAttribute('value');
|
||||
}
|
||||
}*/
|
||||
};
|
||||
|
||||
function noop() {}
|
||||
|
||||
/**
|
||||
* Loop over all of the attributes on the target node and make sure the
|
||||
* original DOM node has the same attributes. If an attribute
|
||||
* found on the original node is not on the new node then remove it from
|
||||
* the original node
|
||||
* @param {HTMLElement} fromNode
|
||||
* @param {HTMLElement} toNode
|
||||
*/
|
||||
function morphAttrs(fromNode, toNode) {
|
||||
var attrs = toNode.attributes;
|
||||
var i;
|
||||
var attr;
|
||||
var attrName;
|
||||
var attrValue;
|
||||
var foundAttrs = {};
|
||||
|
||||
for (i=attrs.length-1; i>=0; i--) {
|
||||
attr = attrs[i];
|
||||
if (attr.specified !== false) {
|
||||
attrName = attr.name;
|
||||
attrValue = attr.value;
|
||||
foundAttrs[attrName] = true;
|
||||
|
||||
if (fromNode.getAttribute(attrName) !== attrValue) {
|
||||
fromNode.setAttribute(attrName, attrValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete any extra attributes found on the original DOM element that weren't
|
||||
// found on the target element.
|
||||
attrs = fromNode.attributes;
|
||||
|
||||
for (i=attrs.length-1; i>=0; i--) {
|
||||
attr = attrs[i];
|
||||
if (attr.specified !== false) {
|
||||
attrName = attr.name;
|
||||
if (!foundAttrs.hasOwnProperty(attrName)) {
|
||||
fromNode.removeAttribute(attrName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies the children of one DOM element to another DOM element
|
||||
*/
|
||||
function moveChildren(from, to) {
|
||||
var curChild = from.firstChild;
|
||||
while(curChild) {
|
||||
var nextChild = curChild.nextSibling;
|
||||
to.appendChild(curChild);
|
||||
curChild = nextChild;
|
||||
}
|
||||
return to;
|
||||
}
|
||||
|
||||
function morphdom(fromNode, toNode, options) {
|
||||
if (!options) {
|
||||
options = {};
|
||||
}
|
||||
|
||||
if (typeof toNode === 'string') {
|
||||
var newBodyEl = document.createElement('body');
|
||||
newBodyEl.innerHTML = toNode;
|
||||
toNode = newBodyEl.childNodes[0];
|
||||
}
|
||||
|
||||
var savedEls = {}; // Used to save off DOM elements with IDs
|
||||
var unmatchedEls = {};
|
||||
var onNodeDiscarded = options.onNodeDiscarded || noop;
|
||||
var onBeforeMorphEl = options.onBeforeMorphEl || noop;
|
||||
var onBeforeMorphElChildren = options.onBeforeMorphElChildren || noop;
|
||||
|
||||
function removeNodeHelper(node, nestedInSavedEl) {
|
||||
var id = node.id;
|
||||
// If the node has an ID then save it off since we will want
|
||||
// to reuse it in case the target DOM tree has a DOM element
|
||||
// with the same ID
|
||||
if (id) {
|
||||
savedEls[id] = node;
|
||||
} else if (!nestedInSavedEl) {
|
||||
// If we are not nested in a saved element then we know that this node has been
|
||||
// completely discarded and will not exist in the final DOM.
|
||||
onNodeDiscarded(node);
|
||||
}
|
||||
|
||||
if (node.nodeType === 1) {
|
||||
var curChild = node.firstChild;
|
||||
while(curChild) {
|
||||
removeNodeHelper(curChild, nestedInSavedEl || id);
|
||||
curChild = curChild.nextSibling;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function walkDiscardedChildNodes(node) {
|
||||
if (node.nodeType === 1) {
|
||||
var curChild = node.firstChild;
|
||||
while(curChild) {
|
||||
|
||||
|
||||
if (!curChild.id) {
|
||||
// We only want to handle nodes that don't have an ID to avoid double
|
||||
// walking the same saved element.
|
||||
|
||||
onNodeDiscarded(curChild);
|
||||
|
||||
// Walk recursively
|
||||
walkDiscardedChildNodes(curChild);
|
||||
}
|
||||
|
||||
curChild = curChild.nextSibling;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function removeNode(node, parentNode, alreadyVisited) {
|
||||
parentNode.removeChild(node);
|
||||
|
||||
if (alreadyVisited) {
|
||||
if (!node.id) {
|
||||
onNodeDiscarded(node);
|
||||
walkDiscardedChildNodes(node);
|
||||
}
|
||||
} else {
|
||||
removeNodeHelper(node);
|
||||
}
|
||||
}
|
||||
|
||||
function morphEl(fromNode, toNode, alreadyVisited) {
|
||||
if (toNode.id) {
|
||||
// If an element with an ID is being morphed then it is will be in the final
|
||||
// DOM so clear it out of the saved elements collection
|
||||
delete savedEls[toNode.id];
|
||||
}
|
||||
|
||||
if (onBeforeMorphEl(fromNode, toNode) === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
morphAttrs(fromNode, toNode);
|
||||
|
||||
if (onBeforeMorphElChildren(fromNode, toNode) === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
var curToNodeChild = toNode.firstChild;
|
||||
var curFromNodeChild = fromNode.firstChild;
|
||||
var curToNodeId;
|
||||
|
||||
var fromNextSibling;
|
||||
var toNextSibling;
|
||||
var savedEl;
|
||||
var unmatchedEl;
|
||||
|
||||
outer: while(curToNodeChild) {
|
||||
toNextSibling = curToNodeChild.nextSibling;
|
||||
curToNodeId = curToNodeChild.id;
|
||||
|
||||
while(curFromNodeChild) {
|
||||
var curFromNodeId = curFromNodeChild.id;
|
||||
fromNextSibling = curFromNodeChild.nextSibling;
|
||||
|
||||
if (!alreadyVisited) {
|
||||
if (curFromNodeId && (unmatchedEl = unmatchedEls[curFromNodeId])) {
|
||||
unmatchedEl.parentNode.replaceChild(curFromNodeChild, unmatchedEl);
|
||||
morphEl(curFromNodeChild, unmatchedEl, alreadyVisited);
|
||||
curFromNodeChild = fromNextSibling;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
var curFromNodeType = curFromNodeChild.nodeType;
|
||||
|
||||
if (curFromNodeType === curToNodeChild.nodeType) {
|
||||
var isCompatible = false;
|
||||
|
||||
if (curFromNodeType === 1) { // Both nodes being compared are Element nodes
|
||||
if (curFromNodeChild.tagName === curToNodeChild.tagName) {
|
||||
// We have compatible DOM elements
|
||||
if (curFromNodeId || curToNodeId) {
|
||||
// If either DOM element has an ID then we handle
|
||||
// those differently since we want to match up
|
||||
// by ID
|
||||
if (curToNodeId === curFromNodeId) {
|
||||
isCompatible = true;
|
||||
}
|
||||
} else {
|
||||
isCompatible = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (isCompatible) {
|
||||
// We found compatible DOM elements so add a
|
||||
// task to morph the compatible DOM elements
|
||||
morphEl(curFromNodeChild, curToNodeChild, alreadyVisited);
|
||||
}
|
||||
} else if (curFromNodeType === 3) { // Both nodes being compared are Text nodes
|
||||
isCompatible = true;
|
||||
curFromNodeChild.nodeValue = curToNodeChild.nodeValue;
|
||||
}
|
||||
|
||||
if (isCompatible) {
|
||||
curToNodeChild = toNextSibling;
|
||||
curFromNodeChild = fromNextSibling;
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
|
||||
// No compatible match so remove the old node from the DOM
|
||||
removeNode(curFromNodeChild, fromNode, alreadyVisited);
|
||||
|
||||
curFromNodeChild = fromNextSibling;
|
||||
}
|
||||
|
||||
if (curToNodeId) {
|
||||
if ((savedEl = savedEls[curToNodeId])) {
|
||||
morphEl(savedEl, curToNodeChild, true);
|
||||
curToNodeChild = savedEl; // We want to append the saved element instead
|
||||
} else {
|
||||
// The current DOM element in the target tree has an ID
|
||||
// but we did not find a match in any of the corresponding
|
||||
// siblings. We just put the target element in the old DOM tree
|
||||
// but if we later find an element in the old DOM tree that has
|
||||
// a matching ID then we will replace the target element
|
||||
// with the corresponding old element and morph the old element
|
||||
unmatchedEls[curToNodeId] = curToNodeChild;
|
||||
}
|
||||
}
|
||||
|
||||
// If we got this far then we did not find a candidate match for our "to node"
|
||||
// and we exhausted all of the children "from" nodes. Therefore, we will just
|
||||
// append the current "to node" to the end
|
||||
fromNode.appendChild(curToNodeChild);
|
||||
|
||||
curToNodeChild = toNextSibling;
|
||||
curFromNodeChild = fromNextSibling;
|
||||
}
|
||||
|
||||
// We have processed all of the "to nodes". If curFromNodeChild is non-null then
|
||||
// we still have some from nodes left over that need to be removed
|
||||
while(curFromNodeChild) {
|
||||
fromNextSibling = curFromNodeChild.nextSibling;
|
||||
removeNode(curFromNodeChild, fromNode, alreadyVisited);
|
||||
curFromNodeChild = fromNextSibling;
|
||||
}
|
||||
|
||||
var specialElHandler = specialElHandlers[fromNode.tagName];
|
||||
if (specialElHandler) {
|
||||
specialElHandler(fromNode, toNode);
|
||||
}
|
||||
}
|
||||
|
||||
var morphedNode = fromNode;
|
||||
var morphedNodeType = morphedNode.nodeType;
|
||||
var toNodeType = toNode.nodeType;
|
||||
|
||||
// Handle the case where we are given two DOM nodes that are not
|
||||
// compatible (e.g. <div> --> <span> or <div> --> TEXT)
|
||||
if (morphedNodeType === 1) {
|
||||
if (toNodeType === 1) {
|
||||
if (morphedNode.tagName !== toNode.tagName) {
|
||||
onNodeDiscarded(fromNode);
|
||||
morphedNode = moveChildren(morphedNode, document.createElement(toNode.tagName));
|
||||
}
|
||||
} else {
|
||||
// Going from an element node to a text node
|
||||
return toNode;
|
||||
}
|
||||
} else if (morphedNodeType === 3) { // Text node
|
||||
if (toNodeType === 3) {
|
||||
morphedNode.nodeValue = toNode.nodeValue;
|
||||
return morphedNode;
|
||||
} else {
|
||||
onNodeDiscarded(fromNode);
|
||||
// Text node to something else
|
||||
return toNode;
|
||||
}
|
||||
}
|
||||
|
||||
morphEl(morphedNode, toNode, false);
|
||||
|
||||
// Fire the "onNodeDiscarded" event for any saved elements
|
||||
// that never found a new home in the morphed DOM
|
||||
for (var savedElId in savedEls) {
|
||||
if (savedEls.hasOwnProperty(savedElId)) {
|
||||
var savedEl = savedEls[savedElId];
|
||||
onNodeDiscarded(savedEl);
|
||||
walkDiscardedChildNodes(savedEl);
|
||||
}
|
||||
}
|
||||
|
||||
if (morphedNode !== fromNode && fromNode.parentNode) {
|
||||
fromNode.parentNode.replaceChild(morphedNode, fromNode);
|
||||
}
|
||||
|
||||
return morphedNode;
|
||||
}
|
||||
|
||||
module.exports = morphdom;
|
||||
},{}]},{},[1])(1)
|
||||
});
|
File diff suppressed because one or more lines are too long
|
@ -1 +0,0 @@
|
|||
import StatsPlugin
|
|
@ -1,138 +0,0 @@
|
|||
import time
|
||||
import os
|
||||
import sys
|
||||
import atexit
|
||||
|
||||
from Plugin import PluginManager
|
||||
from Config import config
|
||||
|
||||
allow_reload = False # No source reload supported in this plugin
|
||||
|
||||
|
||||
@PluginManager.registerTo("Actions")
|
||||
class ActionsPlugin(object):
|
||||
|
||||
def main(self):
|
||||
global notificationicon, winfolders
|
||||
from lib import notificationicon, winfolders
|
||||
import gevent.threadpool
|
||||
|
||||
self.main = sys.modules["main"]
|
||||
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
icon = notificationicon.NotificationIcon(
|
||||
os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'),
|
||||
"ZeroNet %s" % config.version
|
||||
)
|
||||
self.icon = icon
|
||||
|
||||
if not config.debug: # Hide console if not in debug mode
|
||||
notificationicon.hideConsole()
|
||||
self.console = False
|
||||
else:
|
||||
self.console = True
|
||||
|
||||
@atexit.register
|
||||
def hideIcon():
|
||||
icon.die()
|
||||
|
||||
ui_ip = config.ui_ip if config.ui_ip != "*" else "127.0.0.1"
|
||||
|
||||
icon.items = (
|
||||
(self.titleIp, False),
|
||||
(self.titleConnections, False),
|
||||
(self.titleTransfer, False),
|
||||
(self.titleConsole, self.toggleConsole),
|
||||
(self.titleAutorun, self.toggleAutorun),
|
||||
"--",
|
||||
("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet")),
|
||||
("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/")),
|
||||
("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")),
|
||||
("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")),
|
||||
"--",
|
||||
("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (ui_ip, config.ui_port))),
|
||||
"--",
|
||||
("Quit", self.quit),
|
||||
|
||||
)
|
||||
|
||||
icon.clicked = lambda: self.opensite("http://%s:%s" % (ui_ip, config.ui_port))
|
||||
gevent.threadpool.start_new_thread(icon._run, ()) # Start in real thread (not gevent compatible)
|
||||
super(ActionsPlugin, self).main()
|
||||
icon._die = True
|
||||
|
||||
def quit(self):
|
||||
self.icon.die()
|
||||
time.sleep(0.1)
|
||||
sys.exit()
|
||||
# self.main.ui_server.stop()
|
||||
# self.main.file_server.stop()
|
||||
|
||||
def opensite(self, url):
|
||||
import webbrowser
|
||||
webbrowser.open(url, new=0)
|
||||
|
||||
def titleIp(self):
|
||||
title = "!IP: %s" % config.ip_external
|
||||
if self.main.file_server.port_opened:
|
||||
title += " (active)"
|
||||
else:
|
||||
title += " (passive)"
|
||||
return title
|
||||
|
||||
def titleConnections(self):
|
||||
title = "Connections: %s" % len(self.main.file_server.connections)
|
||||
return title
|
||||
|
||||
def titleTransfer(self):
|
||||
title = "Received: %.2f MB | Sent: %.2f MB" % (
|
||||
float(self.main.file_server.bytes_recv) / 1024 / 1024,
|
||||
float(self.main.file_server.bytes_sent) / 1024 / 1024
|
||||
)
|
||||
return title
|
||||
|
||||
def titleConsole(self):
|
||||
if self.console:
|
||||
return "+Show console window"
|
||||
else:
|
||||
return "Show console window"
|
||||
|
||||
def toggleConsole(self):
|
||||
if self.console:
|
||||
notificationicon.hideConsole()
|
||||
self.console = False
|
||||
else:
|
||||
notificationicon.showConsole()
|
||||
self.console = True
|
||||
|
||||
def getAutorunPath(self):
|
||||
return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)
|
||||
|
||||
def formatAutorun(self):
|
||||
args = sys.argv[:]
|
||||
args.insert(0, sys.executable)
|
||||
if sys.platform == 'win32':
|
||||
args = ['"%s"' % arg for arg in args]
|
||||
cmd = " ".join(args)
|
||||
|
||||
# Dont open browser on autorun
|
||||
cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "").strip()
|
||||
|
||||
return "@echo off\ncd /D %s\n%s" % (os.getcwd(), cmd)
|
||||
|
||||
def isAutorunEnabled(self):
|
||||
path = self.getAutorunPath()
|
||||
return os.path.isfile(path) and open(path).read() == self.formatAutorun()
|
||||
|
||||
def titleAutorun(self):
|
||||
if self.isAutorunEnabled():
|
||||
return "+Start ZeroNet when Windows starts"
|
||||
else:
|
||||
return "Start ZeroNet when Windows starts"
|
||||
|
||||
def toggleAutorun(self):
|
||||
if self.isAutorunEnabled():
|
||||
os.unlink(self.getAutorunPath())
|
||||
else:
|
||||
open(self.getAutorunPath(), "w").write(self.formatAutorun())
|
|
@ -1,4 +0,0 @@
|
|||
import sys
|
||||
|
||||
if sys.platform == 'win32':
|
||||
import TrayiconPlugin
|
|
@ -1,806 +0,0 @@
|
|||
# Pure ctypes windows taskbar notification icon
|
||||
# via https://gist.github.com/jasonbot/5759510
|
||||
# Modified for ZeroNet
|
||||
|
||||
import ctypes
|
||||
import ctypes.wintypes
|
||||
import os
|
||||
#import threading
|
||||
#import Queue
|
||||
import uuid
|
||||
import time
|
||||
import gevent
|
||||
|
||||
__all__ = ['NotificationIcon']
|
||||
|
||||
# Create popup menu
|
||||
|
||||
CreatePopupMenu = ctypes.windll.user32.CreatePopupMenu
|
||||
CreatePopupMenu.restype = ctypes.wintypes.HMENU
|
||||
CreatePopupMenu.argtypes = []
|
||||
|
||||
MF_BYCOMMAND = 0x0
|
||||
MF_BYPOSITION = 0x400
|
||||
|
||||
MF_BITMAP = 0x4
|
||||
MF_CHECKED = 0x8
|
||||
MF_DISABLED = 0x2
|
||||
MF_ENABLED = 0x0
|
||||
MF_GRAYED = 0x1
|
||||
MF_MENUBARBREAK = 0x20
|
||||
MF_MENUBREAK = 0x40
|
||||
MF_OWNERDRAW = 0x100
|
||||
MF_POPUP = 0x10
|
||||
MF_SEPARATOR = 0x800
|
||||
MF_STRING = 0x0
|
||||
MF_UNCHECKED = 0x0
|
||||
|
||||
InsertMenu = ctypes.windll.user32.InsertMenuW
|
||||
InsertMenu.restype = ctypes.wintypes.BOOL
|
||||
InsertMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.LPCWSTR]
|
||||
|
||||
AppendMenu = ctypes.windll.user32.AppendMenuW
|
||||
AppendMenu.restype = ctypes.wintypes.BOOL
|
||||
AppendMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.LPCWSTR]
|
||||
|
||||
SetMenuDefaultItem = ctypes.windll.user32.SetMenuDefaultItem
|
||||
SetMenuDefaultItem.restype = ctypes.wintypes.BOOL
|
||||
SetMenuDefaultItem.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT]
|
||||
|
||||
#class MENUITEMINFO(ctypes.Structure):
|
||||
# UINT cbSize;
|
||||
# UINT fMask;
|
||||
# UINT fType;
|
||||
# UINT fState;
|
||||
# UINT wID;
|
||||
# HMENU hSubMenu;
|
||||
# HBITMAP hbmpChecked;
|
||||
# HBITMAP hbmpUnchecked;
|
||||
# ULONG_PTR dwItemData;
|
||||
# LPTSTR dwTypeData;
|
||||
# UINT cch;
|
||||
# HBITMAP hbmpItem;
|
||||
#
|
||||
#BOOL WINAPI InsertMenuItem(
|
||||
# __in HMENU hMenu,
|
||||
# __in UINT uItem,
|
||||
# __in BOOL fByPosition,
|
||||
# __in LPCMENUITEMINFO lpmii
|
||||
#);
|
||||
#
|
||||
|
||||
class POINT(ctypes.Structure):
|
||||
_fields_ = [ ('x', ctypes.wintypes.LONG),
|
||||
('y', ctypes.wintypes.LONG)]
|
||||
|
||||
GetCursorPos = ctypes.windll.user32.GetCursorPos
|
||||
GetCursorPos.argtypes = [ctypes.POINTER(POINT)]
|
||||
|
||||
SetForegroundWindow = ctypes.windll.user32.SetForegroundWindow
|
||||
SetForegroundWindow.argtypes = [ctypes.wintypes.HWND]
|
||||
|
||||
TPM_LEFTALIGN = 0x0
|
||||
TPM_CENTERALIGN = 0x4
|
||||
TPM_RIGHTALIGN = 0x8
|
||||
|
||||
TPM_TOPALIGN = 0x0
|
||||
TPM_VCENTERALIGN = 0x10
|
||||
TPM_BOTTOMALIGN = 0x20
|
||||
|
||||
TPM_NONOTIFY = 0x80
|
||||
TPM_RETURNCMD = 0x100
|
||||
|
||||
TPM_LEFTBUTTON = 0x0
|
||||
TPM_RIGHTBUTTON = 0x2
|
||||
|
||||
TPM_HORNEGANIMATION = 0x800
|
||||
TPM_HORPOSANIMATION = 0x400
|
||||
TPM_NOANIMATION = 0x4000
|
||||
TPM_VERNEGANIMATION = 0x2000
|
||||
TPM_VERPOSANIMATION = 0x1000
|
||||
|
||||
TrackPopupMenu = ctypes.windll.user32.TrackPopupMenu
|
||||
TrackPopupMenu.restype = ctypes.wintypes.BOOL
|
||||
TrackPopupMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.wintypes.HWND, ctypes.c_void_p]
|
||||
|
||||
PostMessage = ctypes.windll.user32.PostMessageW
|
||||
PostMessage.restype = ctypes.wintypes.BOOL
|
||||
PostMessage.argtypes = [ctypes.wintypes.HWND, ctypes.wintypes.UINT, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM]
|
||||
|
||||
DestroyMenu = ctypes.windll.user32.DestroyMenu
|
||||
DestroyMenu.restype = ctypes.wintypes.BOOL
|
||||
DestroyMenu.argtypes = [ctypes.wintypes.HMENU]
|
||||
|
||||
# Create notification icon
|
||||
|
||||
GUID = ctypes.c_ubyte * 16
|
||||
|
||||
class TimeoutVersionUnion(ctypes.Union):
|
||||
_fields_ = [('uTimeout', ctypes.wintypes.UINT),
|
||||
('uVersion', ctypes.wintypes.UINT),]
|
||||
|
||||
NIS_HIDDEN = 0x1
|
||||
NIS_SHAREDICON = 0x2
|
||||
|
||||
class NOTIFYICONDATA(ctypes.Structure):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(NOTIFYICONDATA, self).__init__(*args, **kwargs)
|
||||
self.cbSize = ctypes.sizeof(self)
|
||||
_fields_ = [
|
||||
('cbSize', ctypes.wintypes.DWORD),
|
||||
('hWnd', ctypes.wintypes.HWND),
|
||||
('uID', ctypes.wintypes.UINT),
|
||||
('uFlags', ctypes.wintypes.UINT),
|
||||
('uCallbackMessage', ctypes.wintypes.UINT),
|
||||
('hIcon', ctypes.wintypes.HICON),
|
||||
('szTip', ctypes.wintypes.WCHAR * 64),
|
||||
('dwState', ctypes.wintypes.DWORD),
|
||||
('dwStateMask', ctypes.wintypes.DWORD),
|
||||
('szInfo', ctypes.wintypes.WCHAR * 256),
|
||||
('union', TimeoutVersionUnion),
|
||||
('szInfoTitle', ctypes.wintypes.WCHAR * 64),
|
||||
('dwInfoFlags', ctypes.wintypes.DWORD),
|
||||
('guidItem', GUID),
|
||||
('hBalloonIcon', ctypes.wintypes.HICON),
|
||||
]
|
||||
|
||||
NIM_ADD = 0
|
||||
NIM_MODIFY = 1
|
||||
NIM_DELETE = 2
|
||||
NIM_SETFOCUS = 3
|
||||
NIM_SETVERSION = 4
|
||||
|
||||
NIF_MESSAGE = 1
|
||||
NIF_ICON = 2
|
||||
NIF_TIP = 4
|
||||
NIF_STATE = 8
|
||||
NIF_INFO = 16
|
||||
NIF_GUID = 32
|
||||
NIF_REALTIME = 64
|
||||
NIF_SHOWTIP = 128
|
||||
|
||||
NIIF_NONE = 0
|
||||
NIIF_INFO = 1
|
||||
NIIF_WARNING = 2
|
||||
NIIF_ERROR = 3
|
||||
NIIF_USER = 4
|
||||
|
||||
NOTIFYICON_VERSION = 3
|
||||
NOTIFYICON_VERSION_4 = 4
|
||||
|
||||
Shell_NotifyIcon = ctypes.windll.shell32.Shell_NotifyIconW
|
||||
Shell_NotifyIcon.restype = ctypes.wintypes.BOOL
|
||||
Shell_NotifyIcon.argtypes = [ctypes.wintypes.DWORD, ctypes.POINTER(NOTIFYICONDATA)]
|
||||
|
||||
# Load icon/image
|
||||
|
||||
IMAGE_BITMAP = 0
|
||||
IMAGE_ICON = 1
|
||||
IMAGE_CURSOR = 2
|
||||
|
||||
LR_CREATEDIBSECTION = 0x00002000
|
||||
LR_DEFAULTCOLOR = 0x00000000
|
||||
LR_DEFAULTSIZE = 0x00000040
|
||||
LR_LOADFROMFILE = 0x00000010
|
||||
LR_LOADMAP3DCOLORS = 0x00001000
|
||||
LR_LOADTRANSPARENT = 0x00000020
|
||||
LR_MONOCHROME = 0x00000001
|
||||
LR_SHARED = 0x00008000
|
||||
LR_VGACOLOR = 0x00000080
|
||||
|
||||
OIC_SAMPLE = 32512
|
||||
OIC_HAND = 32513
|
||||
OIC_QUES = 32514
|
||||
OIC_BANG = 32515
|
||||
OIC_NOTE = 32516
|
||||
OIC_WINLOGO = 32517
|
||||
OIC_WARNING = OIC_BANG
|
||||
OIC_ERROR = OIC_HAND
|
||||
OIC_INFORMATION = OIC_NOTE
|
||||
|
||||
LoadImage = ctypes.windll.user32.LoadImageW
|
||||
LoadImage.restype = ctypes.wintypes.HANDLE
|
||||
LoadImage.argtypes = [ctypes.wintypes.HINSTANCE, ctypes.wintypes.LPCWSTR, ctypes.wintypes.UINT, ctypes.c_int, ctypes.c_int, ctypes.wintypes.UINT]
|
||||
|
||||
# CreateWindow call
|
||||
|
||||
WNDPROC = ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM)
|
||||
DefWindowProc = ctypes.windll.user32.DefWindowProcW
|
||||
DefWindowProc.restype = ctypes.c_int
|
||||
DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM]
|
||||
|
||||
WS_OVERLAPPED = 0x00000000L
|
||||
WS_POPUP = 0x80000000L
|
||||
WS_CHILD = 0x40000000L
|
||||
WS_MINIMIZE = 0x20000000L
|
||||
WS_VISIBLE = 0x10000000L
|
||||
WS_DISABLED = 0x08000000L
|
||||
WS_CLIPSIBLINGS = 0x04000000L
|
||||
WS_CLIPCHILDREN = 0x02000000L
|
||||
WS_MAXIMIZE = 0x01000000L
|
||||
WS_CAPTION = 0x00C00000L
|
||||
WS_BORDER = 0x00800000L
|
||||
WS_DLGFRAME = 0x00400000L
|
||||
WS_VSCROLL = 0x00200000L
|
||||
WS_HSCROLL = 0x00100000L
|
||||
WS_SYSMENU = 0x00080000L
|
||||
WS_THICKFRAME = 0x00040000L
|
||||
WS_GROUP = 0x00020000L
|
||||
WS_TABSTOP = 0x00010000L
|
||||
|
||||
WS_MINIMIZEBOX = 0x00020000L
|
||||
WS_MAXIMIZEBOX = 0x00010000L
|
||||
|
||||
WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED |
|
||||
WS_CAPTION |
|
||||
WS_SYSMENU |
|
||||
WS_THICKFRAME |
|
||||
WS_MINIMIZEBOX |
|
||||
WS_MAXIMIZEBOX)
|
||||
|
||||
SM_XVIRTUALSCREEN = 76
|
||||
SM_YVIRTUALSCREEN = 77
|
||||
SM_CXVIRTUALSCREEN = 78
|
||||
SM_CYVIRTUALSCREEN = 79
|
||||
SM_CMONITORS = 80
|
||||
SM_SAMEDISPLAYFORMAT = 81
|
||||
|
||||
WM_NULL = 0x0000
|
||||
WM_CREATE = 0x0001
|
||||
WM_DESTROY = 0x0002
|
||||
WM_MOVE = 0x0003
|
||||
WM_SIZE = 0x0005
|
||||
WM_ACTIVATE = 0x0006
|
||||
WM_SETFOCUS = 0x0007
|
||||
WM_KILLFOCUS = 0x0008
|
||||
WM_ENABLE = 0x000A
|
||||
WM_SETREDRAW = 0x000B
|
||||
WM_SETTEXT = 0x000C
|
||||
WM_GETTEXT = 0x000D
|
||||
WM_GETTEXTLENGTH = 0x000E
|
||||
WM_PAINT = 0x000F
|
||||
WM_CLOSE = 0x0010
|
||||
WM_QUERYENDSESSION = 0x0011
|
||||
WM_QUIT = 0x0012
|
||||
WM_QUERYOPEN = 0x0013
|
||||
WM_ERASEBKGND = 0x0014
|
||||
WM_SYSCOLORCHANGE = 0x0015
|
||||
WM_ENDSESSION = 0x0016
|
||||
WM_SHOWWINDOW = 0x0018
|
||||
WM_CTLCOLOR = 0x0019
|
||||
WM_WININICHANGE = 0x001A
|
||||
WM_SETTINGCHANGE = 0x001A
|
||||
WM_DEVMODECHANGE = 0x001B
|
||||
WM_ACTIVATEAPP = 0x001C
|
||||
WM_FONTCHANGE = 0x001D
|
||||
WM_TIMECHANGE = 0x001E
|
||||
WM_CANCELMODE = 0x001F
|
||||
WM_SETCURSOR = 0x0020
|
||||
WM_MOUSEACTIVATE = 0x0021
|
||||
WM_CHILDACTIVATE = 0x0022
|
||||
WM_QUEUESYNC = 0x0023
|
||||
WM_GETMINMAXINFO = 0x0024
|
||||
WM_PAINTICON = 0x0026
|
||||
WM_ICONERASEBKGND = 0x0027
|
||||
WM_NEXTDLGCTL = 0x0028
|
||||
WM_SPOOLERSTATUS = 0x002A
|
||||
WM_DRAWITEM = 0x002B
|
||||
WM_MEASUREITEM = 0x002C
|
||||
WM_DELETEITEM = 0x002D
|
||||
WM_VKEYTOITEM = 0x002E
|
||||
WM_CHARTOITEM = 0x002F
|
||||
WM_SETFONT = 0x0030
|
||||
WM_GETFONT = 0x0031
|
||||
WM_SETHOTKEY = 0x0032
|
||||
WM_GETHOTKEY = 0x0033
|
||||
WM_QUERYDRAGICON = 0x0037
|
||||
WM_COMPAREITEM = 0x0039
|
||||
WM_GETOBJECT = 0x003D
|
||||
WM_COMPACTING = 0x0041
|
||||
WM_COMMNOTIFY = 0x0044
|
||||
WM_WINDOWPOSCHANGING = 0x0046
|
||||
WM_WINDOWPOSCHANGED = 0x0047
|
||||
WM_POWER = 0x0048
|
||||
WM_COPYDATA = 0x004A
|
||||
WM_CANCELJOURNAL = 0x004B
|
||||
WM_NOTIFY = 0x004E
|
||||
WM_INPUTLANGCHANGEREQUEST = 0x0050
|
||||
WM_INPUTLANGCHANGE = 0x0051
|
||||
WM_TCARD = 0x0052
|
||||
WM_HELP = 0x0053
|
||||
WM_USERCHANGED = 0x0054
|
||||
WM_NOTIFYFORMAT = 0x0055
|
||||
WM_CONTEXTMENU = 0x007B
|
||||
WM_STYLECHANGING = 0x007C
|
||||
WM_STYLECHANGED = 0x007D
|
||||
WM_DISPLAYCHANGE = 0x007E
|
||||
WM_GETICON = 0x007F
|
||||
WM_SETICON = 0x0080
|
||||
WM_NCCREATE = 0x0081
|
||||
WM_NCDESTROY = 0x0082
|
||||
WM_NCCALCSIZE = 0x0083
|
||||
WM_NCHITTEST = 0x0084
|
||||
WM_NCPAINT = 0x0085
|
||||
WM_NCACTIVATE = 0x0086
|
||||
WM_GETDLGCODE = 0x0087
|
||||
WM_SYNCPAINT = 0x0088
|
||||
WM_NCMOUSEMOVE = 0x00A0
|
||||
WM_NCLBUTTONDOWN = 0x00A1
|
||||
WM_NCLBUTTONUP = 0x00A2
|
||||
WM_NCLBUTTONDBLCLK = 0x00A3
|
||||
WM_NCRBUTTONDOWN = 0x00A4
|
||||
WM_NCRBUTTONUP = 0x00A5
|
||||
WM_NCRBUTTONDBLCLK = 0x00A6
|
||||
WM_NCMBUTTONDOWN = 0x00A7
|
||||
WM_NCMBUTTONUP = 0x00A8
|
||||
WM_NCMBUTTONDBLCLK = 0x00A9
|
||||
WM_KEYDOWN = 0x0100
|
||||
WM_KEYUP = 0x0101
|
||||
WM_CHAR = 0x0102
|
||||
WM_DEADCHAR = 0x0103
|
||||
WM_SYSKEYDOWN = 0x0104
|
||||
WM_SYSKEYUP = 0x0105
|
||||
WM_SYSCHAR = 0x0106
|
||||
WM_SYSDEADCHAR = 0x0107
|
||||
WM_KEYLAST = 0x0108
|
||||
WM_IME_STARTCOMPOSITION = 0x010D
|
||||
WM_IME_ENDCOMPOSITION = 0x010E
|
||||
WM_IME_COMPOSITION = 0x010F
|
||||
WM_IME_KEYLAST = 0x010F
|
||||
WM_INITDIALOG = 0x0110
|
||||
WM_COMMAND = 0x0111
|
||||
WM_SYSCOMMAND = 0x0112
|
||||
WM_TIMER = 0x0113
|
||||
WM_HSCROLL = 0x0114
|
||||
WM_VSCROLL = 0x0115
|
||||
WM_INITMENU = 0x0116
|
||||
WM_INITMENUPOPUP = 0x0117
|
||||
WM_MENUSELECT = 0x011F
|
||||
WM_MENUCHAR = 0x0120
|
||||
WM_ENTERIDLE = 0x0121
|
||||
WM_MENURBUTTONUP = 0x0122
|
||||
WM_MENUDRAG = 0x0123
|
||||
WM_MENUGETOBJECT = 0x0124
|
||||
WM_UNINITMENUPOPUP = 0x0125
|
||||
WM_MENUCOMMAND = 0x0126
|
||||
WM_CTLCOLORMSGBOX = 0x0132
|
||||
WM_CTLCOLOREDIT = 0x0133
|
||||
WM_CTLCOLORLISTBOX = 0x0134
|
||||
WM_CTLCOLORBTN = 0x0135
|
||||
WM_CTLCOLORDLG = 0x0136
|
||||
WM_CTLCOLORSCROLLBAR = 0x0137
|
||||
WM_CTLCOLORSTATIC = 0x0138
|
||||
WM_MOUSEMOVE = 0x0200
|
||||
WM_LBUTTONDOWN = 0x0201
|
||||
WM_LBUTTONUP = 0x0202
|
||||
WM_LBUTTONDBLCLK = 0x0203
|
||||
WM_RBUTTONDOWN = 0x0204
|
||||
WM_RBUTTONUP = 0x0205
|
||||
WM_RBUTTONDBLCLK = 0x0206
|
||||
WM_MBUTTONDOWN = 0x0207
|
||||
WM_MBUTTONUP = 0x0208
|
||||
WM_MBUTTONDBLCLK = 0x0209
|
||||
WM_MOUSEWHEEL = 0x020A
|
||||
WM_PARENTNOTIFY = 0x0210
|
||||
WM_ENTERMENULOOP = 0x0211
|
||||
WM_EXITMENULOOP = 0x0212
|
||||
WM_NEXTMENU = 0x0213
|
||||
WM_SIZING = 0x0214
|
||||
WM_CAPTURECHANGED = 0x0215
|
||||
WM_MOVING = 0x0216
|
||||
WM_DEVICECHANGE = 0x0219
|
||||
WM_MDICREATE = 0x0220
|
||||
WM_MDIDESTROY = 0x0221
|
||||
WM_MDIACTIVATE = 0x0222
|
||||
WM_MDIRESTORE = 0x0223
|
||||
WM_MDINEXT = 0x0224
|
||||
WM_MDIMAXIMIZE = 0x0225
|
||||
WM_MDITILE = 0x0226
|
||||
WM_MDICASCADE = 0x0227
|
||||
WM_MDIICONARRANGE = 0x0228
|
||||
WM_MDIGETACTIVE = 0x0229
|
||||
WM_MDISETMENU = 0x0230
|
||||
WM_ENTERSIZEMOVE = 0x0231
|
||||
WM_EXITSIZEMOVE = 0x0232
|
||||
WM_DROPFILES = 0x0233
|
||||
WM_MDIREFRESHMENU = 0x0234
|
||||
WM_IME_SETCONTEXT = 0x0281
|
||||
WM_IME_NOTIFY = 0x0282
|
||||
WM_IME_CONTROL = 0x0283
|
||||
WM_IME_COMPOSITIONFULL = 0x0284
|
||||
WM_IME_SELECT = 0x0285
|
||||
WM_IME_CHAR = 0x0286
|
||||
WM_IME_REQUEST = 0x0288
|
||||
WM_IME_KEYDOWN = 0x0290
|
||||
WM_IME_KEYUP = 0x0291
|
||||
WM_MOUSEHOVER = 0x02A1
|
||||
WM_MOUSELEAVE = 0x02A3
|
||||
WM_CUT = 0x0300
|
||||
WM_COPY = 0x0301
|
||||
WM_PASTE = 0x0302
|
||||
WM_CLEAR = 0x0303
|
||||
WM_UNDO = 0x0304
|
||||
WM_RENDERFORMAT = 0x0305
|
||||
WM_RENDERALLFORMATS = 0x0306
|
||||
WM_DESTROYCLIPBOARD = 0x0307
|
||||
WM_DRAWCLIPBOARD = 0x0308
|
||||
WM_PAINTCLIPBOARD = 0x0309
|
||||
WM_VSCROLLCLIPBOARD = 0x030A
|
||||
WM_SIZECLIPBOARD = 0x030B
|
||||
WM_ASKCBFORMATNAME = 0x030C
|
||||
WM_CHANGECBCHAIN = 0x030D
|
||||
WM_HSCROLLCLIPBOARD = 0x030E
|
||||
WM_QUERYNEWPALETTE = 0x030F
|
||||
WM_PALETTEISCHANGING = 0x0310
|
||||
WM_PALETTECHANGED = 0x0311
|
||||
WM_HOTKEY = 0x0312
|
||||
WM_PRINT = 0x0317
|
||||
WM_PRINTCLIENT = 0x0318
|
||||
WM_HANDHELDFIRST = 0x0358
|
||||
WM_HANDHELDLAST = 0x035F
|
||||
WM_AFXFIRST = 0x0360
|
||||
WM_AFXLAST = 0x037F
|
||||
WM_PENWINFIRST = 0x0380
|
||||
WM_PENWINLAST = 0x038F
|
||||
WM_APP = 0x8000
|
||||
WM_USER = 0x0400
|
||||
WM_REFLECT = WM_USER + 0x1c00
|
||||
|
||||
class WNDCLASSEX(ctypes.Structure):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(WNDCLASSEX, self).__init__(*args, **kwargs)
|
||||
self.cbSize = ctypes.sizeof(self)
|
||||
_fields_ = [("cbSize", ctypes.c_uint),
|
||||
("style", ctypes.c_uint),
|
||||
("lpfnWndProc", WNDPROC),
|
||||
("cbClsExtra", ctypes.c_int),
|
||||
("cbWndExtra", ctypes.c_int),
|
||||
("hInstance", ctypes.wintypes.HANDLE),
|
||||
("hIcon", ctypes.wintypes.HANDLE),
|
||||
("hCursor", ctypes.wintypes.HANDLE),
|
||||
("hBrush", ctypes.wintypes.HANDLE),
|
||||
("lpszMenuName", ctypes.wintypes.LPCWSTR),
|
||||
("lpszClassName", ctypes.wintypes.LPCWSTR),
|
||||
("hIconSm", ctypes.wintypes.HANDLE)]
|
||||
|
||||
UpdateWindow = ctypes.windll.user32.UpdateWindow
|
||||
UpdateWindow.argtypes = [ctypes.wintypes.HWND]
|
||||
|
||||
SW_HIDE = 0
|
||||
SW_SHOWNORMAL = 1
|
||||
SW_SHOW = 5
|
||||
|
||||
ShowWindow = ctypes.windll.user32.ShowWindow
|
||||
ShowWindow.argtypes = [ctypes.wintypes.HWND, ctypes.c_int]
|
||||
|
||||
CS_VREDRAW = 0x0001
|
||||
CS_HREDRAW = 0x0002
|
||||
CS_KEYCVTWINDOW = 0x0004
|
||||
CS_DBLCLKS = 0x0008
|
||||
CS_OWNDC = 0x0020
|
||||
CS_CLASSDC = 0x0040
|
||||
CS_PARENTDC = 0x0080
|
||||
CS_NOKEYCVT = 0x0100
|
||||
CS_NOCLOSE = 0x0200
|
||||
CS_SAVEBITS = 0x0800
|
||||
CS_BYTEALIGNCLIENT = 0x1000
|
||||
CS_BYTEALIGNWINDOW = 0x2000
|
||||
CS_GLOBALCLASS = 0x4000
|
||||
|
||||
COLOR_SCROLLBAR = 0
|
||||
COLOR_BACKGROUND = 1
|
||||
COLOR_ACTIVECAPTION = 2
|
||||
COLOR_INACTIVECAPTION = 3
|
||||
COLOR_MENU = 4
|
||||
COLOR_WINDOW = 5
|
||||
COLOR_WINDOWFRAME = 6
|
||||
COLOR_MENUTEXT = 7
|
||||
COLOR_WINDOWTEXT = 8
|
||||
COLOR_CAPTIONTEXT = 9
|
||||
COLOR_ACTIVEBORDER = 10
|
||||
COLOR_INACTIVEBORDER = 11
|
||||
COLOR_APPWORKSPACE = 12
|
||||
COLOR_HIGHLIGHT = 13
|
||||
COLOR_HIGHLIGHTTEXT = 14
|
||||
COLOR_BTNFACE = 15
|
||||
COLOR_BTNSHADOW = 16
|
||||
COLOR_GRAYTEXT = 17
|
||||
COLOR_BTNTEXT = 18
|
||||
COLOR_INACTIVECAPTIONTEXT = 19
|
||||
COLOR_BTNHIGHLIGHT = 20
|
||||
|
||||
LoadCursor = ctypes.windll.user32.LoadCursorW
|
||||
|
||||
def GenerateDummyWindow(callback, uid):
|
||||
newclass = WNDCLASSEX()
|
||||
newclass.lpfnWndProc = callback
|
||||
newclass.style = CS_VREDRAW | CS_HREDRAW
|
||||
newclass.lpszClassName = uid.replace("-", "")
|
||||
newclass.hBrush = COLOR_BACKGROUND
|
||||
newclass.hCursor = LoadCursor(0, 32512)
|
||||
ATOM = ctypes.windll.user32.RegisterClassExW(ctypes.byref(newclass))
|
||||
#print "ATOM", ATOM
|
||||
#print "CLASS", newclass.lpszClassName
|
||||
hwnd = ctypes.windll.user32.CreateWindowExW(0,
|
||||
newclass.lpszClassName,
|
||||
u"Dummy Window",
|
||||
WS_OVERLAPPEDWINDOW | WS_SYSMENU,
|
||||
ctypes.windll.user32.GetSystemMetrics(SM_CXVIRTUALSCREEN),
|
||||
ctypes.windll.user32.GetSystemMetrics(SM_CYVIRTUALSCREEN),
|
||||
800, 600, 0, 0, 0, 0)
|
||||
ShowWindow(hwnd, SW_SHOW)
|
||||
UpdateWindow(hwnd)
|
||||
ShowWindow(hwnd, SW_HIDE)
|
||||
return hwnd
|
||||
|
||||
# Message loop calls
|
||||
|
||||
TIMERCALLBACK = ctypes.WINFUNCTYPE(None,
|
||||
ctypes.wintypes.HWND,
|
||||
ctypes.wintypes.UINT,
|
||||
ctypes.POINTER(ctypes.wintypes.UINT),
|
||||
ctypes.wintypes.DWORD)
|
||||
|
||||
SetTimer = ctypes.windll.user32.SetTimer
|
||||
SetTimer.restype = ctypes.POINTER(ctypes.wintypes.UINT)
|
||||
SetTimer.argtypes = [ctypes.wintypes.HWND,
|
||||
ctypes.POINTER(ctypes.wintypes.UINT),
|
||||
ctypes.wintypes.UINT,
|
||||
TIMERCALLBACK]
|
||||
|
||||
KillTimer = ctypes.windll.user32.KillTimer
|
||||
KillTimer.restype = ctypes.wintypes.BOOL
|
||||
KillTimer.argtypes = [ctypes.wintypes.HWND,
|
||||
ctypes.POINTER(ctypes.wintypes.UINT)]
|
||||
|
||||
class MSG(ctypes.Structure):
|
||||
_fields_ = [ ('HWND', ctypes.wintypes.HWND),
|
||||
('message', ctypes.wintypes.UINT),
|
||||
('wParam', ctypes.wintypes.WPARAM),
|
||||
('lParam', ctypes.wintypes.LPARAM),
|
||||
('time', ctypes.wintypes.DWORD),
|
||||
('pt', POINT)]
|
||||
|
||||
GetMessage = ctypes.windll.user32.GetMessageW
|
||||
GetMessage.restype = ctypes.wintypes.BOOL
|
||||
GetMessage.argtypes = [ctypes.POINTER(MSG), ctypes.wintypes.HWND, ctypes.wintypes.UINT, ctypes.wintypes.UINT]
|
||||
|
||||
TranslateMessage = ctypes.windll.user32.TranslateMessage
|
||||
TranslateMessage.restype = ctypes.wintypes.ULONG
|
||||
TranslateMessage.argtypes = [ctypes.POINTER(MSG)]
|
||||
|
||||
DispatchMessage = ctypes.windll.user32.DispatchMessageW
|
||||
DispatchMessage.restype = ctypes.wintypes.ULONG
|
||||
DispatchMessage.argtypes = [ctypes.POINTER(MSG)]
|
||||
|
||||
def LoadIcon(iconfilename, small=False):
|
||||
return LoadImage(0,
|
||||
unicode(iconfilename),
|
||||
IMAGE_ICON,
|
||||
16 if small else 0,
|
||||
16 if small else 0,
|
||||
LR_LOADFROMFILE)
|
||||
|
||||
|
||||
class NotificationIcon(object):
|
||||
def __init__(self, iconfilename, tooltip=None):
|
||||
assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename)
|
||||
self._iconfile = unicode(iconfilename)
|
||||
self._hicon = LoadIcon(self._iconfile, True)
|
||||
assert self._hicon, "Failed to load {}".format(iconfilename)
|
||||
#self._pumpqueue = Queue.Queue()
|
||||
self._die = False
|
||||
self._timerid = None
|
||||
self._uid = uuid.uuid4()
|
||||
self._tooltip = unicode(tooltip) if tooltip else u''
|
||||
#self._thread = threading.Thread(target=self._run)
|
||||
#self._thread.start()
|
||||
self._info_bubble = None
|
||||
self.items = []
|
||||
|
||||
|
||||
def _bubble(self, iconinfo):
|
||||
if self._info_bubble:
|
||||
info_bubble = self._info_bubble
|
||||
self._info_bubble = None
|
||||
message = unicode(self._info_bubble)
|
||||
iconinfo.uFlags |= NIF_INFO
|
||||
iconinfo.szInfo = message
|
||||
iconinfo.szInfoTitle = message
|
||||
iconinfo.dwInfoFlags = NIIF_INFO
|
||||
iconinfo.union.uTimeout = 10000
|
||||
Shell_NotifyIcon(NIM_MODIFY, ctypes.pointer(iconinfo))
|
||||
|
||||
|
||||
def _run(self):
|
||||
self._windowproc = WNDPROC(self._callback)
|
||||
self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid))
|
||||
|
||||
iconinfo = NOTIFYICONDATA()
|
||||
iconinfo.hWnd = self._hwnd
|
||||
iconinfo.uID = 100
|
||||
iconinfo.uFlags = NIF_ICON | NIF_SHOWTIP | NIF_MESSAGE | (NIF_TIP if self._tooltip else 0)
|
||||
iconinfo.uCallbackMessage = WM_MENUCOMMAND
|
||||
iconinfo.hIcon = self._hicon
|
||||
iconinfo.szTip = self._tooltip
|
||||
iconinfo.dwState = NIS_SHAREDICON
|
||||
iconinfo.dwInfoFlags = NIIF_INFO
|
||||
# iconinfo.dwStateMask = NIS_SHAREDICON
|
||||
iconinfo.szInfo = "Application Title"
|
||||
iconinfo.union.uTimeout = 5000
|
||||
|
||||
Shell_NotifyIcon(NIM_ADD, ctypes.pointer(iconinfo))
|
||||
|
||||
iconinfo.union.uVersion = NOTIFYICON_VERSION
|
||||
Shell_NotifyIcon(NIM_SETVERSION, ctypes.pointer(iconinfo))
|
||||
self.iconinfo = iconinfo
|
||||
|
||||
PostMessage(self._hwnd, WM_NULL, 0, 0)
|
||||
|
||||
#self._timerid = SetTimer(self._hwnd, self._timerid, 25, TIMERCALLBACK())
|
||||
message = MSG()
|
||||
last_time = -1
|
||||
ret = None
|
||||
while not self._die:
|
||||
try:
|
||||
ret = GetMessage(ctypes.pointer(message), 0, 0, 0)
|
||||
TranslateMessage(ctypes.pointer(message))
|
||||
DispatchMessage(ctypes.pointer(message))
|
||||
except Exception, err:
|
||||
# print "NotificationIcon error", err, message
|
||||
message = MSG()
|
||||
time.sleep(0.125)
|
||||
print "Icon thread stopped, removing icon..."
|
||||
#KillTimer(self._hwnd, self._timerid)
|
||||
|
||||
Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA)))
|
||||
ctypes.windll.user32.DestroyWindow(self._hwnd)
|
||||
ctypes.windll.user32.DestroyIcon(self._hicon)
|
||||
|
||||
|
||||
def _menu(self):
|
||||
if not hasattr(self, 'items'):
|
||||
return
|
||||
|
||||
menu = CreatePopupMenu()
|
||||
func = None
|
||||
|
||||
try:
|
||||
iidx = 1000
|
||||
defaultitem = -1
|
||||
item_map = {}
|
||||
for fs in self.items:
|
||||
iidx += 1
|
||||
if isinstance(fs, basestring):
|
||||
if fs and not fs.strip('-_='):
|
||||
AppendMenu(menu, MF_SEPARATOR, iidx, fs)
|
||||
else:
|
||||
AppendMenu(menu, MF_STRING | MF_GRAYED, iidx, fs)
|
||||
elif isinstance(fs, tuple):
|
||||
if callable(fs[0]):
|
||||
itemstring = fs[0]()
|
||||
else:
|
||||
itemstring = unicode(fs[0])
|
||||
flags = MF_STRING
|
||||
if itemstring.startswith("!"):
|
||||
itemstring = itemstring[1:]
|
||||
defaultitem = iidx
|
||||
if itemstring.startswith("+"):
|
||||
itemstring = itemstring[1:]
|
||||
flags = flags | MF_CHECKED
|
||||
itemcallable = fs[1]
|
||||
item_map[iidx] = itemcallable
|
||||
if itemcallable is False:
|
||||
flags = flags | MF_DISABLED
|
||||
elif not callable(itemcallable):
|
||||
flags = flags | MF_GRAYED
|
||||
AppendMenu(menu, flags, iidx, itemstring)
|
||||
|
||||
if defaultitem != -1:
|
||||
SetMenuDefaultItem(menu, defaultitem, 0)
|
||||
|
||||
pos = POINT()
|
||||
GetCursorPos(ctypes.pointer(pos))
|
||||
|
||||
PostMessage(self._hwnd, WM_NULL, 0, 0)
|
||||
|
||||
SetForegroundWindow(self._hwnd)
|
||||
|
||||
ti = TrackPopupMenu(menu, TPM_RIGHTBUTTON | TPM_RETURNCMD | TPM_NONOTIFY, pos.x, pos.y, 0, self._hwnd, None)
|
||||
|
||||
if ti in item_map:
|
||||
func = item_map[ti]
|
||||
|
||||
PostMessage(self._hwnd, WM_NULL, 0, 0)
|
||||
finally:
|
||||
DestroyMenu(menu)
|
||||
if func: func()
|
||||
|
||||
|
||||
def clicked(self):
|
||||
self._menu()
|
||||
|
||||
|
||||
|
||||
def _callback(self, hWnd, msg, wParam, lParam):
|
||||
# Check if the main thread is still alive
|
||||
if msg == WM_TIMER:
|
||||
if not any(thread.getName() == 'MainThread' and thread.isAlive()
|
||||
for thread in threading.enumerate()):
|
||||
self._die = True
|
||||
elif msg == WM_MENUCOMMAND and lParam == WM_LBUTTONUP:
|
||||
self.clicked()
|
||||
elif msg == WM_MENUCOMMAND and lParam == WM_RBUTTONUP:
|
||||
self._menu()
|
||||
else:
|
||||
return DefWindowProc(hWnd, msg, wParam, lParam)
|
||||
return 1
|
||||
|
||||
|
||||
def die(self):
|
||||
self._die = True
|
||||
PostMessage(self._hwnd, WM_NULL, 0, 0)
|
||||
time.sleep(0.2)
|
||||
try:
|
||||
Shell_NotifyIcon(NIM_DELETE, self.iconinfo)
|
||||
except Exception, err:
|
||||
print "Icon remove error", err
|
||||
ctypes.windll.user32.DestroyWindow(self._hwnd)
|
||||
ctypes.windll.user32.DestroyIcon(self._hicon)
|
||||
|
||||
|
||||
def pump(self):
|
||||
try:
|
||||
while not self._pumpqueue.empty():
|
||||
callable = self._pumpqueue.get(False)
|
||||
callable()
|
||||
except Queue.Empty:
|
||||
pass
|
||||
|
||||
|
||||
def announce(self, text):
|
||||
self._info_bubble = text
|
||||
|
||||
|
||||
def hideConsole():
|
||||
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
|
||||
|
||||
def showConsole():
|
||||
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
import time
|
||||
def greet():
|
||||
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
|
||||
print "Hello"
|
||||
def quit():
|
||||
ni._die = True
|
||||
#sys.exit()
|
||||
def announce():
|
||||
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 1)
|
||||
ni.announce("Hello there")
|
||||
|
||||
def clicked():
|
||||
ni.announce("Hello")
|
||||
|
||||
def dynamicTitle():
|
||||
return "!The time is: %s" % time.time()
|
||||
|
||||
ni = NotificationIcon(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../trayicon.ico'), "ZeroNet 0.2.9")
|
||||
ni.items = [
|
||||
(dynamicTitle, False),
|
||||
('Hello', greet),
|
||||
('Title', False),
|
||||
('!Default', greet),
|
||||
('+Popup bubble', announce),
|
||||
'Nothing',
|
||||
'--',
|
||||
('Quit', quit)
|
||||
]
|
||||
ni.clicked = clicked
|
||||
import atexit
|
||||
@atexit.register
|
||||
def goodbye():
|
||||
print "You are now leaving the Python sector."
|
||||
|
||||
ni._run()
|
|
@ -1,53 +0,0 @@
|
|||
''' Get windows special folders without pythonwin
|
||||
Example:
|
||||
import specialfolders
|
||||
start_programs = specialfolders.get(specialfolders.PROGRAMS)
|
||||
|
||||
Code is public domain, do with it what you will.
|
||||
|
||||
Luke Pinner - Environment.gov.au, 2010 February 10
|
||||
'''
|
||||
|
||||
#Imports use _syntax to mask them from autocomplete IDE's
|
||||
import ctypes as _ctypes
|
||||
from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub
|
||||
_SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW
|
||||
|
||||
#public special folder constants
|
||||
DESKTOP= 0
|
||||
PROGRAMS= 2
|
||||
MYDOCUMENTS= 5
|
||||
FAVORITES= 6
|
||||
STARTUP= 7
|
||||
RECENT= 8
|
||||
SENDTO= 9
|
||||
STARTMENU= 11
|
||||
MYMUSIC= 13
|
||||
MYVIDEOS= 14
|
||||
NETHOOD= 19
|
||||
FONTS= 20
|
||||
TEMPLATES= 21
|
||||
ALLUSERSSTARTMENU= 22
|
||||
ALLUSERSPROGRAMS= 23
|
||||
ALLUSERSSTARTUP= 24
|
||||
ALLUSERSDESKTOP= 25
|
||||
APPLICATIONDATA= 26
|
||||
PRINTHOOD= 27
|
||||
LOCALSETTINGSAPPLICATIONDATA= 28
|
||||
ALLUSERSFAVORITES= 31
|
||||
LOCALSETTINGSTEMPORARYINTERNETFILES=32
|
||||
COOKIES= 33
|
||||
LOCALSETTINGSHISTORY= 34
|
||||
ALLUSERSAPPLICATIONDATA= 35
|
||||
|
||||
def get(intFolder):
|
||||
_SHGetFolderPath.argtypes = [_HWND, _ctypes.c_int, _HANDLE, _DWORD, _LPCWSTR]
|
||||
auPathBuffer = _cub(_MAX_PATH)
|
||||
exit_code=_SHGetFolderPath(0, intFolder, 0, 0, auPathBuffer)
|
||||
return auPathBuffer.value
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import os
|
||||
print get(STARTUP)
|
||||
open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd())
|
Binary file not shown.
Before Width: | Height: | Size: 1.1 KiB |
|
@ -1,71 +0,0 @@
|
|||
import logging
|
||||
import re
|
||||
|
||||
from Plugin import PluginManager
|
||||
|
||||
allow_reload = False # No reload supported
|
||||
|
||||
log = logging.getLogger("ZeronamePlugin")
|
||||
|
||||
|
||||
@PluginManager.registerTo("SiteManager")
|
||||
class SiteManagerPlugin(object):
|
||||
zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
|
||||
site_zeroname = None
|
||||
|
||||
def load(self):
|
||||
super(SiteManagerPlugin, self).load()
|
||||
if not self.get(self.zeroname_address):
|
||||
self.need(self.zeroname_address) # Need ZeroName site
|
||||
|
||||
# Checks if its a valid address
|
||||
def isAddress(self, address):
|
||||
if self.isDomain(address):
|
||||
return True
|
||||
else:
|
||||
return super(SiteManagerPlugin, self).isAddress(address)
|
||||
|
||||
# Return: True if the address is domain
|
||||
def isDomain(self, address):
|
||||
return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
|
||||
|
||||
# Resolve domain
|
||||
# Return: The address or None
|
||||
def resolveDomain(self, domain):
|
||||
domain = domain.lower()
|
||||
if not self.site_zeroname:
|
||||
self.site_zeroname = self.need(self.zeroname_address)
|
||||
self.site_zeroname.needFile("data/names.json", priority=10)
|
||||
db = self.site_zeroname.storage.loadJson("data/names.json")
|
||||
return db.get(domain)
|
||||
|
||||
# Return or create site and start download site files
|
||||
# Return: Site or None if dns resolve failed
|
||||
def need(self, address, all_file=True):
|
||||
if self.isDomain(address): # Its looks like a domain
|
||||
address_resolved = self.resolveDomain(address)
|
||||
if address_resolved:
|
||||
address = address_resolved
|
||||
else:
|
||||
return None
|
||||
|
||||
return super(SiteManagerPlugin, self).need(address, all_file)
|
||||
|
||||
# Return: Site object or None if not found
|
||||
def get(self, address):
|
||||
if self.sites is None: # Not loaded yet
|
||||
self.load()
|
||||
if self.isDomain(address): # Its looks like a domain
|
||||
address_resolved = self.resolveDomain(address)
|
||||
if address_resolved: # Domain found
|
||||
site = self.sites.get(address_resolved)
|
||||
if site:
|
||||
site_domain = site.settings.get("domain")
|
||||
if site_domain != address:
|
||||
site.settings["domain"] = address
|
||||
else: # Domain not found
|
||||
site = self.sites.get(address)
|
||||
|
||||
else: # Access by site address
|
||||
site = self.sites.get(address)
|
||||
return site
|
|
@ -1,42 +0,0 @@
|
|||
import re
|
||||
from Plugin import PluginManager
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
|
||||
class UiRequestPlugin(object):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
from Site import SiteManager
|
||||
self.site_manager = SiteManager.site_manager
|
||||
super(UiRequestPlugin, self).__init__(*args, **kwargs)
|
||||
|
||||
# Media request
|
||||
def actionSiteMedia(self, path):
|
||||
match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
|
||||
if match: # Its a valid domain, resolve first
|
||||
domain = match.group("address")
|
||||
address = self.site_manager.resolveDomain(domain)
|
||||
if address:
|
||||
path = "/media/" + address + match.group("inner_path")
|
||||
return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
|
||||
|
||||
# Is mediarequest allowed from that referer
|
||||
def isMediaRequestAllowed(self, site_address, referer):
|
||||
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
|
||||
referer_path = re.sub("\?.*", "", referer_path) # Remove http params
|
||||
|
||||
if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): # Different origin
|
||||
return False
|
||||
|
||||
if self.isProxyRequest(): # Match to site domain
|
||||
referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access
|
||||
referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1)
|
||||
else: # Match to request path
|
||||
referer_site_address = re.match("/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")
|
||||
|
||||
if referer_site_address == site_address: # Referer site address as simple address
|
||||
return True
|
||||
elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns
|
||||
return True
|
||||
else: # Invalid referer
|
||||
return False
|
|
@ -1,2 +0,0 @@
|
|||
import UiRequestPlugin
|
||||
import SiteManagerPlugin
|
|
@ -1,159 +0,0 @@
|
|||
import time
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import socket
|
||||
|
||||
from bitcoinrpc.authproxy import AuthServiceProxy
|
||||
|
||||
|
||||
def publish():
|
||||
print "* Signing..."
|
||||
os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"]))
|
||||
print "* Publishing..."
|
||||
os.system("python zeronet.py sitePublish %s" % config["site"])
|
||||
|
||||
|
||||
def processNameOp(domain, value):
|
||||
if not value.startswith("{"):
|
||||
return False
|
||||
try:
|
||||
data = json.loads(value)
|
||||
except Exception, err:
|
||||
print "Json load error: %s" % err
|
||||
return False
|
||||
if "zeronet" not in data:
|
||||
print "No zeronet in ", data.keys()
|
||||
return False
|
||||
if not isinstance(data["zeronet"], dict):
|
||||
print "Not dict: ", data["zeronet"]
|
||||
return False
|
||||
if not re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", domain):
|
||||
print "Invalid domain: ", domain
|
||||
return False
|
||||
|
||||
if "slave" in sys.argv:
|
||||
print "Waiting for master update arrive"
|
||||
time.sleep(30) # Wait 30 sec to allow master updater
|
||||
|
||||
# Note: Requires the file data/names.json to exist and contain "{}" to work
|
||||
names_raw = open(names_path, "rb").read()
|
||||
names = json.loads(names_raw)
|
||||
for subdomain, address in data["zeronet"].items():
|
||||
subdomain = subdomain.lower()
|
||||
address = re.sub("[^A-Za-z0-9]", "", address)
|
||||
print subdomain, domain, "->", address
|
||||
if subdomain:
|
||||
if re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", subdomain):
|
||||
names["%s.%s.bit" % (subdomain, domain)] = address
|
||||
else:
|
||||
print "Invalid subdomain:", domain, subdomain
|
||||
else:
|
||||
names["%s.bit" % domain] = address
|
||||
|
||||
new_names_raw = json.dumps(names, indent=2, sort_keys=True)
|
||||
if new_names_raw != names_raw:
|
||||
open(names_path, "wb").write(new_names_raw)
|
||||
return True
|
||||
else:
|
||||
print "names not changed"
|
||||
return False
|
||||
|
||||
|
||||
def processBlock(block_id):
    """Scan one Namecoin block for name operations and apply them.

    Fetches the block via the module-global `rpc` proxy, feeds every name op
    found in its transactions to processNameOp(), and calls publish() if any
    of them changed names.json.
    """
    print("Processing block #%s..." % block_id)
    s = time.time()
    block_hash = rpc.getblockhash(block_id)
    block = rpc.getblock(block_hash)

    print("Checking %s tx" % len(block["tx"]))
    updated = 0
    for tx in block["tx"]:
        try:
            transaction = rpc.getrawtransaction(tx, 1)  # 1 = verbose/decoded
            for vout in transaction.get("vout", []):
                if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
                    name_op = vout["scriptPubKey"]["nameOp"]
                    # processNameOp returns a bool; True counts as 1 update
                    updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
        except Exception as err:
            # One bad transaction must not abort the whole block
            print("Error processing tx #%s %s" % (tx, err))
    print("Done in %.3fs (updated %s)." % (time.time() - s, updated))
    if updated:
        publish()
|
||||
|
||||
|
||||
# Loading config...

# Check whether platform is on windows or linux
# On linux namecoin is installed under ~/.namecoin, while on windows it is in %appdata%/Namecoin
if sys.platform == "win32":
    namecoin_location = os.getenv('APPDATA') + "/Namecoin/"
else:
    namecoin_location = os.path.expanduser("~/.namecoin/")

config_path = namecoin_location + 'zeroname_config.json'
if not os.path.isfile(config_path):  # Create sample config and exit
    with open(config_path, "w") as f:
        f.write(
            json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': 223911}, indent=2)
        )
    print("Example config written to %s" % config_path)
    sys.exit(0)

config = json.load(open(config_path))
names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"])
os.chdir(config["zeronet_path"])  # Change working dir - tells script where Zeronet install is.

# Getting rpc connect details
namecoin_conf = open(namecoin_location + "namecoin.conf").read()

# Connecting to RPC
rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)

rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)

last_block = int(rpc.getinfo()["blocks"])

if not config["lastprocessed"]:  # Start processing from last block
    config["lastprocessed"] = last_block

# Processing skipped blocks
print("Processing block from #%s to #%s..." % (config["lastprocessed"], last_block))
for block_id in range(config["lastprocessed"], last_block + 1):
    processBlock(block_id)

# processBlock(223911) # Testing zeronetwork.bit
# processBlock(227052) # Testing brainwallets.bit
# processBlock(236824) # Utf8 domain name (invalid should skip)
# processBlock(236752) # Uppercase domain (invalid should skip)
# processBlock(236870) # Encoded domain (should pass)
# sys.exit(0)

# Main loop: block until a new block arrives, then process the gap.
while 1:
    print("Waiting for new block", end="")
    sys.stdout.flush()
    while 1:
        try:
            # Recreate the proxy each round: long waits can leave it in a
            # broken state after a timeout.
            rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
            if int(rpc.getinfo()["blocks"]) > last_block:
                break
            time.sleep(1)
            rpc.waitforblock()  # Long-poll until namecoind sees a new block
            print("Found")
            break  # Block found
        except socket.timeout:  # Timeout
            print(".", end="")
            sys.stdout.flush()
        except Exception as err:
            print("Exception", err.__class__, err)
            time.sleep(5)

    last_block = int(rpc.getinfo()["blocks"])
    for block_id in range(config["lastprocessed"] + 1, last_block + 1):
        processBlock(block_id)

    config["lastprocessed"] = last_block
    open(config_path, "w").write(json.dumps(config, indent=1))
|
|
@ -1,153 +0,0 @@
|
|||
import logging, json, os, re, sys, time
|
||||
import gevent
|
||||
from Plugin import PluginManager
|
||||
from Config import config
|
||||
from util import Http
|
||||
from Debug import Debug
|
||||
|
||||
allow_reload = False # No reload supported
|
||||
|
||||
log = logging.getLogger("DnschainPlugin")
|
||||
|
||||
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
    """Extends SiteManager with .bit domain resolution via dnschain gateways."""
    dns_cache_path = "%s/dns_cache.json" % config.data_dir
    dns_cache = None  # Lazy-loaded: {domain: [address, expiry_timestamp]}

    # Checks if its a valid address
    def isAddress(self, address):
        if self.isDomain(address):
            return True
        else:
            return super(SiteManagerPlugin, self).isAddress(address)

    # Return: True (a match object) if the address is domain
    def isDomain(self, address):
        return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)

    # Load dns entries from data/dns_cache.json
    def loadDnsCache(self):
        if os.path.isfile(self.dns_cache_path):
            self.dns_cache = json.load(open(self.dns_cache_path))
        else:
            self.dns_cache = {}
        log.debug("Loaded dns cache, entries: %s" % len(self.dns_cache))

    # Save dns entries to data/dns_cache.json
    def saveDnsCache(self):
        # Text mode: json.dump writes str, which a "wb" file rejects on Python 3
        json.dump(self.dns_cache, open(self.dns_cache_path, "w"), indent=2)

    # Resolve domain using dnschain.net
    # Return: The address or None
    def resolveDomainDnschainNet(self, domain):
        try:
            match = self.isDomain(domain)
            sub_domain = match.group(1).strip(".")
            top_domain = match.group(2)
            if not sub_domain:
                sub_domain = "@"  # "@" marks the apex (top-level) record
            address = None
            with gevent.Timeout(5, Exception("Timeout: 5s")):
                res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read()
                data = json.loads(res)["data"]["value"]
                if "zeronet" in data:
                    for key, val in data["zeronet"].items():
                        self.dns_cache[key + "." + top_domain] = [val, time.time() + 60 * 60 * 5]  # Cache for 5 hours
                    self.saveDnsCache()
                    return data["zeronet"].get(sub_domain)
            # Not found
            return address
        except Exception as err:
            log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err)))

    # Resolve domain using dnschain.info
    # Return: The address or None
    def resolveDomainDnschainInfo(self, domain):
        try:
            match = self.isDomain(domain)
            sub_domain = match.group(1).strip(".")
            top_domain = match.group(2)
            if not sub_domain:
                sub_domain = "@"  # "@" marks the apex (top-level) record
            address = None
            with gevent.Timeout(5, Exception("Timeout: 5s")):
                res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read()
                data = json.loads(res)["value"]
                for key, val in data["zeronet"].items():
                    self.dns_cache[key + "." + top_domain] = [val, time.time() + 60 * 60 * 5]  # Cache for 5 hours
                self.saveDnsCache()
                return data["zeronet"].get(sub_domain)
            # Not found
            return address
        except Exception as err:
            log.debug("Dnschain.info %s resolve error: %s" % (domain, Debug.formatException(err)))

    # Resolve domain
    # Return: The address or None
    def resolveDomain(self, domain):
        domain = domain.lower()
        if self.dns_cache is None:
            self.loadDnsCache()
        if domain.count(".") < 2:  # Its a topleved request, prepend @. to it
            domain = "@." + domain

        domain_details = self.dns_cache.get(domain)
        if domain_details and time.time() < domain_details[1]:  # Found in cache and its not expired
            return domain_details[0]
        else:
            # Resolve dns using both dnschain gateways in parallel and
            # only trust the answer if they agree.
            thread_dnschain_info = gevent.spawn(self.resolveDomainDnschainInfo, domain)
            thread_dnschain_net = gevent.spawn(self.resolveDomainDnschainNet, domain)
            gevent.joinall([thread_dnschain_net, thread_dnschain_info])  # Wait for finish

            if thread_dnschain_info.value and thread_dnschain_net.value:  # Both successful
                if thread_dnschain_info.value == thread_dnschain_net.value:  # Same returned value
                    return thread_dnschain_info.value
                else:
                    log.error("Dns %s missmatch: %s != %s" % (domain, thread_dnschain_info.value, thread_dnschain_net.value))

            # Problem during resolve
            if domain_details:  # Resolve failed, but we have it in the cache
                domain_details[1] = time.time() + 60 * 60  # Dont try again for 1 hour
                return domain_details[0]
            else:  # Not found in cache
                self.dns_cache[domain] = [None, time.time() + 60]  # Don't check again for 1 min
                return None

    # Return or create site and start download site files
    # Return: Site or None if dns resolve failed
    def need(self, address, all_file=True):
        if self.isDomain(address):  # Its looks like a domain
            address_resolved = self.resolveDomain(address)
            if address_resolved:
                address = address_resolved
            else:
                return None

        return super(SiteManagerPlugin, self).need(address, all_file)

    # Return: Site object or None if not found
    def get(self, address):
        if self.sites is None:  # Not loaded yet
            self.load()
        if self.isDomain(address):  # Its looks like a domain
            address_resolved = self.resolveDomain(address)
            if address_resolved:  # Domain found
                site = self.sites.get(address_resolved)
                if site:
                    site_domain = site.settings.get("domain")
                    if site_domain != address:
                        site.settings["domain"] = address  # Remember which domain mapped here
            else:  # Domain not found
                site = self.sites.get(address)

        else:  # Access by site address
            site = self.sites.get(address)
        return site
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
import re
|
||||
from Plugin import PluginManager
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
    """Resolves .bit domains in media requests and referer checks."""

    def __init__(self, server=None):
        from Site import SiteManager
        self.site_manager = SiteManager.site_manager
        super(UiRequestPlugin, self).__init__(server)

    # Media request: rewrite a domain-based /media/ path to the resolved
    # site address before delegating to the normal handler.
    def actionSiteMedia(self, path):
        match = re.match(r"/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
        if match:  # Its a valid domain, resolve first
            domain = match.group("address")
            address = self.site_manager.resolveDomain(domain)
            if address:
                path = "/media/" + address + match.group("inner_path")
        return super(UiRequestPlugin, self).actionSiteMedia(path)  # Get the wrapper frame output

    # Is mediarequest allowed from that referer
    def isMediaRequestAllowed(self, site_address, referer):
        referer_path = re.sub(r"http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # Remove site address
        referer_site_address = re.match(r"/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")

        if referer_site_address == site_address:  # Referer site address as simple address
            return True
        elif self.site_manager.resolveDomain(referer_site_address) == site_address:  # Referer site address as dns
            return True
        else:  # Invalid referer
            return False
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
# This plugin is experimental, if you really want to enable uncomment the following lines:
|
||||
# import DnschainPlugin
|
||||
# import SiteManagerPlugin
|
|
@ -1,24 +0,0 @@
|
|||
import re
|
||||
from Plugin import PluginManager
|
||||
|
||||
# Warning: If you modify the donation address then renmae the plugin's directory to "MyDonationMessage" to prevent the update script overwrite
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
    # Inject a donation message to every page top right corner
    def actionWrapper(self, path):
        back = super(UiRequestPlugin, self).actionWrapper(path)
        if not back or not hasattr(back, "endswith"):
            return back  # Wrapper error or not string returned, injection not possible

        # Replace the closing tags with the message plus new closing tags.
        # Raw pattern string: "\s" in a plain literal is an invalid escape
        # sequence on modern Python.
        back = re.sub(r"</body>\s*</html>\s*$",
            """
<style>
 #donation_message { position: absolute; bottom: 0px; right: 20px; padding: 7px; font-family: Arial; font-size: 11px }
</style>
<a id='donation_message' href='https://blockchain.info/address/1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX' target='_blank'>Please donate to help to keep this ZeroProxy alive</a>
</body>
</html>
""", back)

        return back
|
|
@ -1 +0,0 @@
|
|||
import DonationMessagePlugin
|
|
@ -1,171 +0,0 @@
|
|||
import re
|
||||
import sys
|
||||
from Plugin import PluginManager
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
    """Multiuser mode: auto-create a user per browser and inject welcome/login messages."""

    def __init__(self, *args, **kwargs):
        self.user_manager = sys.modules["User.UserManager"].user_manager
        super(UiRequestPlugin, self).__init__(*args, **kwargs)

    # Create new user and inject user welcome message if necessary
    # Return: Html body also containing the injection
    def actionWrapper(self, path, extra_headers=None):
        match = re.match(r"/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
        if not match:
            return False
        inner_path = match.group("inner_path").lstrip("/")
        html_request = "." not in inner_path or inner_path.endswith(".html")  # Only inject html to html requests

        user_created = False
        if html_request:
            user = self.getCurrentUser()  # Get user from cookie
            if not user:  # No user found by cookie
                user = self.user_manager.create()
                user_created = True

        if user_created:
            if not extra_headers:
                extra_headers = []
            extra_headers.append(('Set-Cookie', "master_address=%s;path=/;max-age=2592000;" % user.master_address))  # = 30 days

        loggedin = self.get.get("login") == "done"

        back_generator = super(UiRequestPlugin, self).actionWrapper(path, extra_headers)  # Get the wrapper frame output

        if not back_generator:  # Wrapper error or not string returned, injection not possible
            return False

        if user_created:
            back = next(back_generator)  # Py3: generators have no .next() method
            master_seed = user.master_seed
            # Inject the welcome message
            inject_html = """
<!-- Multiser plugin -->
<style>
.masterseed { font-size: 95%; background-color: #FFF0AD; padding: 5px 8px; margin: 9px 0px }
</style>
<script>
hello_message = "<b>Hello, welcome to ZeroProxy!</b><div style='margin-top: 8px'>A new, unique account created for you:</div>"
hello_message+= "<div class='masterseed'>{master_seed}</div> <div>This is your private key, <b>save it</b>, so you can login next time.</div><br>"
hello_message+= "<a href='#' class='button' style='margin-left: 0px'>Ok, Saved it!</a> or <a href='#Login' onclick='wrapper.ws.cmd(\\"userLoginForm\\", []); return false'>Login</a><br><br>"
hello_message+= "<small>This site is allows you to browse ZeroNet content, but if you want to secure your account <br>"
hello_message+= "and help to make a better network, then please run your own <a href='https://github.com/HelloZeroNet/ZeroNet' target='_blank'>ZeroNet client</a>.</small>"
setTimeout(function() {
wrapper.notifications.add("hello", "info", hello_message)
delete(hello_message)
}, 1000)
</script>
</body>
</html>
""".replace("\t", "")
            inject_html = inject_html.replace("{master_seed}", master_seed)  # Set the master seed in the message

            return iter([re.sub(r"</body>\s*</html>\s*$", inject_html, back)])  # Replace the </body></html> tags with the injection

        elif loggedin:
            back = next(back_generator)
            inject_html = """
<!-- Multiser plugin -->
<script>
setTimeout(function() {
wrapper.notifications.add("login", "done", "Hello again!<br><small>You have been logged in successfully</small>", 5000)
}, 1000)
</script>
</body>
</html>
""".replace("\t", "")
            return iter([re.sub(r"</body>\s*</html>\s*$", inject_html, back)])  # Replace the </body></html> tags with the injection

        else:  # No injection necessary
            return back_generator

    # Get the current user based on request's cookies
    # Return: User object or None if no match
    def getCurrentUser(self):
        cookies = self.getCookies()
        user = None
        if "master_address" in cookies:
            users = self.user_manager.list()
            user = users.get(cookies["master_address"])
        return user
|
||||
|
||||
|
||||
@PluginManager.registerTo("UserManager")
class UserManagerPlugin(object):
    # In multiuser mode do not load the users from disk;
    # accounts are created on demand and live only in memory.
    def load(self):
        if not self.users:
            self.users = {}
        return self.users

    # Find user by master address
    # Return: User or None
    def get(self, master_address=None):
        # dict.get replaces the membership-test-then-index pattern;
        # behavior is identical (None when not found).
        return self.list().get(master_address)
|
||||
|
||||
|
||||
@PluginManager.registerTo("User")
class UserPlugin(object):
    def save(self):
        """Disable persistence: in multiuser mode account data exists only in memory, never in data/users.json."""
        return False
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
    """Websocket actions for multiuser mode: seed display, login, logout."""

    # Let the page know we running in multiuser mode
    def formatServerInfo(self):
        server_info = super(UiWebsocketPlugin, self).formatServerInfo()
        server_info["multiuser"] = True
        if "ADMIN" in self.site.settings["permissions"]:
            server_info["master_address"] = self.user.master_address
        return server_info

    # Show current user's master seed
    def actionUserShowMasterSeed(self, to):
        if "ADMIN" not in self.site.settings["permissions"]:
            return self.response(to, "Show master seed not allowed")
        message = "<b style='padding-top: 5px; display: inline-block'>Your unique private key:</b>"
        message += "<div style='font-size: 84%%; background-color: #FFF0AD; padding: 5px 8px; margin: 9px 0px'>%s</div>" % self.user.master_seed
        message += "<small>(Save it, you can access your account using this information)</small>"
        self.cmd("notification", ["info", message])

    # Logout user
    def actionUserLogout(self, to):
        if "ADMIN" not in self.site.settings["permissions"]:
            return self.response(to, "Logout not allowed")
        message = "<b>You have been logged out.</b> <a href='#Login' class='button' onclick='wrapper.ws.cmd(\"userLoginForm\", []); return false'>Login to another account</a>"
        message += "<script>document.cookie = 'master_address=; expires=Thu, 01 Jan 1970 00:00:00 UTC'</script>"
        self.cmd("notification", ["done", message, 1000000])  # 1000000 = Show ~forever :)
        # Delete from user_manager
        user_manager = sys.modules["User.UserManager"].user_manager
        if self.user.master_address in user_manager.users:
            del user_manager.users[self.user.master_address]
            self.response(to, "Successful logout")
        else:
            self.response(to, "User not found")

    # Show login form
    def actionUserLoginForm(self, to):
        self.cmd("prompt", ["<b>Login</b><br>Your private key:", "password", "Login"], self.responseUserLogin)

    # Login form submit
    def responseUserLogin(self, master_seed):
        user_manager = sys.modules["User.UserManager"].user_manager
        user = user_manager.create(master_seed=master_seed)
        if user.master_address:
            # Fix user-facing typo: was "Successfull login"
            message = "Successful login, reloading page..."
            message += "<script>document.cookie = 'master_address=%s;path=/;max-age=2592000;'</script>" % user.master_address
            message += "<script>wrapper.reload('login=done')</script>"
            self.cmd("notification", ["done", message])
        else:
            self.cmd("notification", ["error", "Error: Invalid master seed"])
            self.actionUserLoginForm(0)
|
|
@ -1 +0,0 @@
|
|||
import MultiuserPlugin
|
|
@ -1,118 +0,0 @@
|
|||
import string
|
||||
import random
|
||||
import time
|
||||
import json
|
||||
import re
|
||||
|
||||
from Config import config
|
||||
from Plugin import PluginManager
|
||||
|
||||
# Keep authenticated sessions alive across plugin reloads: reuse the existing
# module-level dict if this module was executed before, otherwise start empty.
# At module scope globals() is the right namespace to probe (and .keys() was
# redundant: `in` works on the dict directly).
if "sessions" not in globals():
    sessions = {}
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
    """Password-protects the whole UiServer behind a session cookie."""
    sessions = sessions  # Shared module-level dict so sessions survive reloads
    last_cleanup = time.time()

    def route(self, path):
        if path.endswith("favicon.ico"):
            return self.actionFile("src/Ui/media/img/favicon.ico")
        else:
            if config.ui_password:
                if time.time() - self.last_cleanup > 60 * 60:  # Cleanup expired sessions every hour
                    self.cleanup()
                # Validate session
                session_id = self.getCookies().get("session_id")
                if session_id not in self.sessions:  # Invalid session id, display login
                    return self.actionLogin()
            return super(UiRequestPlugin, self).route(path)

    # Action: Login (generator: yields the response body)
    def actionLogin(self):
        template = open("plugins/UiPassword/login.html").read()
        self.sendHeader()
        posted = self.getPosted()
        if posted:  # Validate http posted data
            if self.checkPassword(posted.get("password")):
                # Valid password, create session
                session_id = self.randomString(26)
                self.sessions[session_id] = {
                    "added": time.time(),
                    "keep": posted.get("keep")
                }

                # Redirect to homepage or referer
                url = self.env.get("HTTP_REFERER", "")
                if not url or re.sub(r"\?.*", "", url).endswith("/Login"):
                    url = "/" + config.homepage
                cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id)  # Max age = 30 days
                self.start_response('301 Redirect', [('Location', url), cookie_header])
                yield "Redirecting..."

            else:
                # Invalid password, show login form again
                template = template.replace("{result}", "bad_password")
                yield template

    def checkPassword(self, password):
        # NOTE(review): plain string compare against the configured password;
        # consider hmac.compare_digest to avoid timing side channels.
        return password == config.ui_password

    def randomString(self, chars):
        # NOTE(review): `random` is not a CSPRNG; `secrets` would be stronger
        # for session tokens. Kept as-is to preserve behavior.
        return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(chars))

    @classmethod
    def cleanup(cls):
        cls.last_cleanup = time.time()
        # Iterate a snapshot: deleting from the dict while iterating the live
        # view raises RuntimeError on Python 3.
        for session_id, session in list(cls.sessions.items()):
            if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60:  # Max 60days for keep sessions
                del cls.sessions[session_id]
            elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24:  # Max 24h for non-keep sessions
                del cls.sessions[session_id]

    # Action: Display sessions
    def actionSessions(self):
        self.sendHeader()
        yield "<pre>"
        yield json.dumps(self.sessions, indent=4)

    # Action: Logout
    def actionLogout(self):
        # Session id has to passed as get parameter or called without referer to avoid remote logout
        session_id = self.getCookies().get("session_id")
        if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"):
            if session_id in self.sessions:
                del self.sessions[session_id]
            self.start_response('301 Redirect', [
                ('Location', "/"),
                ('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT")
            ])
            yield "Redirecting..."
        else:
            self.sendHeader()
            yield "Error: Invalid session id"
|
||||
|
||||
|
||||
@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
    def createArguments(self):
        """Register the UiPassword command-line option, then let the plugin chain continue."""
        option_group = self.parser.add_argument_group("UiPassword plugin")
        option_group.add_argument(
            '--ui_password',
            help='Password to access UiServer',
            default=None,
            metavar="password",
        )
        return super(ConfigPlugin, self).createArguments()
|
||||
|
||||
|
||||
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
    def actionUiLogout(self, to):
        """Websocket action: drop the caller's UI session (requires ADMIN permission)."""
        if "ADMIN" not in self.getPermissions(to):
            return self.response(to, "You don't have permission to run this command")

        # Redirect the browser to /Logout carrying its own session id, which
        # authorizes the logout on the HTTP side.
        session_id = self.request.getCookies().get("session_id", "")
        redirect_script = "<script>document.location.href = '/Logout?session_id=%s'</script>" % session_id
        self.cmd("notification", ["done", redirect_script])
|
|
@ -1 +0,0 @@
|
|||
import UiPasswordPlugin
|
|
@ -1,116 +0,0 @@
|
|||
<html>
<head>
	<title>Log In</title>
	<meta name="viewport" id="viewport" content="width=device-width, initial-scale=1.0">
</head>

<!-- UiPassword login page. The server substitutes {result} below; the page
     posts "password" and optional "keep" back to the same URL. -->
<style>
	body {
		background-color: #323C4D; font-family: "Segoe UI", Helvetica, Arial; font-weight: lighter;
		font-size: 22px; color: #333; letter-spacing: 1px; color: white; overflow: hidden;
	}
	.login { left: 50%; position: absolute; top: 50%; transform: translateX(-50%) translateY(-50%); -webkit-transform: translateX(-50%) translateY(-50%); width: 100%; max-width: 370px; text-align: center; }

	*:focus { outline: 0; }
	input[type=text], input[type=password] {
		padding: 10px 0px; border: 0px; display: block; margin: 15px 0px; width: 100%; border-radius: 30px; transition: 0.3s ease-out; background-color: #DDD;
		text-align: center; font-family: "Segoe UI", Helvetica, Arial; font-weight: lighter; font-size: 28px; border: 2px solid #323C4D;
	}
	input[type=text]:focus, input[type=password]:focus {
		border: 2px solid #FFF; background-color: #FFF;
	}
	input[type=checkbox] { opacity: 0; }
	input[type=checkbox]:checked + label { color: white; }
	input[type=checkbox]:focus + label::before { background-color: #435065; }
	input[type=checkbox]:checked + label::before { box-shadow: inset 0px 0px 0px 5px white; background-color: #4DCC6E; }
	input.error { border: 2px solid #F44336 !important; animation: shake 1s }
	label::before {
		content: ""; width: 20px; height: 20px; background-color: #323C4D;
		display: inline-block; margin-left: -20px; border-radius: 15px; box-shadow: inset 0px 0px 0px 2px #9EA5B3;
		transition: all 0.1s; margin-right: 7px; position: relative; top: 2px;
	}
	label { vertical-align: -1px; color: #9EA5B3; transition: all 0.3s; }

	.button {
		padding: 13px; display: inline-block; margin: 15px 0px; width: 100%; border-radius: 30px; text-align: center; white-space: nowrap;
		font-size: 28px; color: #333; background: linear-gradient(45deg, #6B14D3 0, #7A26E2 25%, #4962DD 90%);
		box-sizing: border-box; margin-top: 50px; color: white; text-decoration: none; transition: 0.3s ease-out;
	}
	.button:hover, .button:focus { box-shadow: 0px 5px 30px rgba(0,0,0,0.3); }
	.button:active { transform: translateY(1px); box-shadow: 0px 0px 20px rgba(0,0,0,0.5); transition: none; }

	#login_form_submit { display: none; }

	.login-anim { animation: login 1s cubic-bezier(0.785, 0.135, 0.15, 0.86) forwards; }

	@keyframes login {
		0% { width: 100%; }
		60% { width: 63px; transform: scale(1); color: rgba(255,255,255,0); }
		70% { width: 63px; transform: scale(1); color: rgba(255,255,255,0); }
		100% { transform: scale(80); width: 63px; color: rgba(255,255,255,0); }
	}

	@keyframes shake {
		0%, 100% { transform: translateX(0); }
		10%, 30%, 50%, 70%, 90% { transform: translateX(-10px); }
		20%, 40%, 60%, 80% { transform: translateX(10px); }
	}
</style>

<body>

<!-- Login form: Enter key and the styled button both trigger submit() -->
<div class="login">
	<form action="" method="post" id="login_form" onkeypress="return onFormKeypress(event)">
		<!--<input type="text" name="username" placeholder="Username" required/>-->
		<input type="password" name="password" placeholder="Password" required/>
		<input type="checkbox" name="keep" id="keep"><label for="keep">Keep me logged in</label>
		<div style="clear: both"></div>
		<a href="#" class="button" onclick="return submit()" id="login_button"><span>Log In</span></a>
		<input type="submit" id="login_form_submit"/>
	</form>
</div>

<script>

function onFormKeypress(e) {
	if (event.keyCode == 13) {
		submit()
		return false
	}
}

// Play the login animation before really posting the form (if it validates)
function submit() {
	var form = document.getElementById("login_form")
	if (form.checkValidity()) {
		document.getElementById("login_button").className = "button login-anim"
		setTimeout(function() {
			form.submit()
		}, 1000)
	} else {
		form.submit()
	}
	return false
}

// Mark the password field as wrong; reset the styling on next input
function badPassword() {
	var elem = document.getElementsByName("password")[0]
	elem.className = "error"
	elem.placeholder = "Wrong Password"
	elem.focus()
	elem.addEventListener('input', function() {
		elem.className = ""
		elem.placeholder = "Password"
	})
}

result = "{result}"  // Substituted by the server

if (result == "bad_password")
	badPassword()

</script>

</body>
</html>
|
|
@ -1,68 +0,0 @@
|
|||
import logging, json, os, re, sys, time
|
||||
import gevent
|
||||
from Plugin import PluginManager
|
||||
from Config import config
|
||||
from Debug import Debug
|
||||
from domainLookup import lookupDomain
|
||||
|
||||
allow_reload = False # No reload supported
|
||||
|
||||
log = logging.getLogger("Zeroname-localPlugin")
|
||||
|
||||
|
||||
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
    """Extends SiteManager with .bit resolution via a local Namecoin node (domainLookup)."""

    def load(self):
        super(SiteManagerPlugin, self).load()

    # Checks if its a valid address
    def isAddress(self, address):
        if self.isDomain(address):
            return True
        else:
            return super(SiteManagerPlugin, self).isAddress(address)

    # Return: True (a match object) if the address is domain
    def isDomain(self, address):
        return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)

    # Resolve domain
    # Return: The address or None
    def resolveDomain(self, domain):
        return lookupDomain(domain)

    # Return or create site and start download site files
    # Return: Site or None if dns resolve failed
    def need(self, address, all_file=True):
        if self.isDomain(address):  # Its looks like a domain
            address_resolved = self.resolveDomain(address)
            if address_resolved:
                address = address_resolved
            else:
                return None

        return super(SiteManagerPlugin, self).need(address, all_file)

    # Return: Site object or None if not found
    def get(self, address):
        if self.sites is None:  # Not loaded yet
            self.load()
        if self.isDomain(address):  # Its looks like a domain
            address_resolved = self.resolveDomain(address)
            if address_resolved:  # Domain found
                site = self.sites.get(address_resolved)
                if site:
                    site_domain = site.settings.get("domain")
                    if site_domain != address:
                        site.settings["domain"] = address  # Remember which domain mapped here
            else:  # Domain not found
                site = self.sites.get(address)

        else:  # Access by site address
            site = self.sites.get(address)
        return site
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
import re
|
||||
from Plugin import PluginManager
|
||||
|
||||
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
    """Rewrites /media/<domain>/... requests to the resolved site address."""

    def __init__(self, *args, **kwargs):
        from Site import SiteManager
        self.site_manager = SiteManager.site_manager
        super(UiRequestPlugin, self).__init__(*args, **kwargs)

    # Media request: resolve domain paths before serving
    def actionSiteMedia(self, path):
        # Raw strings avoid Python 3 invalid escape sequence warnings
        match = re.match(r"/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
        if match:  # Its a valid domain, resolve first
            domain = match.group("address")
            address = self.site_manager.resolveDomain(domain)
            if address:
                path = "/media/"+address+match.group("inner_path")
        return super(UiRequestPlugin, self).actionSiteMedia(path)  # Get the wrapper frame output

    # Is mediarequest allowed from that referer
    def isMediaRequestAllowed(self, site_address, referer):
        referer_path = re.sub(r"http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # Remove site address
        referer_path = re.sub(r"\?.*", "", referer_path)  # Remove http params

        if self.isProxyRequest():  # Match to site domain
            referer = re.sub(r"^http://zero[/]+", "http://", referer)  # Allow /zero access
            referer_site_address = re.match(r"http[s]{0,1}://(.*?)(/|$)", referer).group(1)
        else:  # Match to request path
            referer_site_address = re.match(r"/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")

        if referer_site_address == site_address:  # Referer site address as simple address
            return True
        elif self.site_manager.resolveDomain(referer_site_address) == site_address:  # Referer site address as dns
            return True
        else:  # Invalid referer
            return False
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
import UiRequestPlugin
|
||||
import SiteManagerPlugin
|
|
@ -1,54 +0,0 @@
|
|||
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
|
||||
import time, json, os, sys, re, socket, json
|
||||
|
||||
# Either returns domain's address or none if it doesn't exist
|
||||
# Supports subdomains and .bit on the end
|
||||
# Resolve a ZeroNet domain via the local Namecoin node.
# Supports subdomains and an optional ".bit" suffix.
# Return: The site address for the domain, or None if it doesn't exist
def lookupDomain(domain):
    domain = domain.lower()

    # Remove .bit suffix on the end
    if domain.endswith(".bit"):
        domain = domain[:-4]

    # Split off the subdomain (part before the first dot)
    if "." in domain:
        subdomain, _, domain = domain.partition(".")
    else:
        subdomain = ""

    try:
        domain_object = rpc.name_show("d/" + domain)
    except Exception:
        # Domain doesn't exist (or the RPC call failed).
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate.
        return None

    domain_json = json.loads(domain_object['value'])

    try:
        domain_address = domain_json["zeronet"][subdomain]
    except Exception:
        # Domain exists but doesn't have any zeronet value for this subdomain
        return None

    return domain_address
|
||||
|
||||
# Loading config...
|
||||
|
||||
# Check whether platform is on windows or linux
|
||||
# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin
|
||||
|
||||
if sys.platform == "win32":
|
||||
namecoin_location = os.getenv('APPDATA') + "/Namecoin/"
|
||||
else:
|
||||
namecoin_location = os.path.expanduser("~/.namecoin/")
|
||||
|
||||
# Getting rpc connect details
|
||||
namecoin_conf = open(namecoin_location + "namecoin.conf").read()
|
||||
|
||||
# Connecting to RPC
|
||||
rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
|
||||
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
|
||||
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
|
||||
|
||||
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
|
|
@ -1,2 +1,13 @@
|
|||
gevent==1.0.1
|
||||
msgpack-python>=0.4.4
|
||||
gevent==1.4.0; python_version <= "3.6"
|
||||
greenlet==0.4.16; python_version <= "3.6"
|
||||
gevent>=20.9.0; python_version >= "3.7"
|
||||
msgpack>=0.4.4
|
||||
base58
|
||||
merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev
|
||||
rsa
|
||||
PySocks>=1.6.8
|
||||
pyasn1
|
||||
websocket_client
|
||||
gevent-ws
|
||||
coincurve
|
||||
maxminddb
|
||||
|
|
468
src/Config.py
468
src/Config.py
|
@ -1,16 +1,42 @@
|
|||
import argparse
|
||||
import sys
|
||||
import os
|
||||
import ConfigParser
|
||||
import locale
|
||||
import re
|
||||
import configparser
|
||||
import logging
|
||||
import logging.handlers
|
||||
import stat
|
||||
import time
|
||||
|
||||
|
||||
class Config(object):
|
||||
|
||||
def __init__(self, argv):
|
||||
self.version = "0.3.4"
|
||||
self.rev = 668
|
||||
self.version = "0.9.0"
|
||||
self.rev = 4630
|
||||
self.argv = argv
|
||||
self.action = None
|
||||
self.test_parser = None
|
||||
self.pending_changes = {}
|
||||
self.need_restart = False
|
||||
self.keys_api_change_allowed = set([
|
||||
"tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers",
|
||||
"trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline",
|
||||
"threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
|
||||
])
|
||||
self.keys_restart_need = set([
|
||||
"tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
|
||||
])
|
||||
self.start_dir = self.getStartDir()
|
||||
|
||||
self.config_file = self.start_dir + "/zeronet.conf"
|
||||
self.data_dir = self.start_dir + "/data"
|
||||
self.log_dir = self.start_dir + "/log"
|
||||
self.openssl_lib_file = None
|
||||
self.openssl_bin_file = None
|
||||
|
||||
self.trackers_file = False
|
||||
self.createParser()
|
||||
self.createArguments()
|
||||
|
||||
|
@ -27,15 +53,41 @@ class Config(object):
|
|||
def strToBool(self, v):
|
||||
return v.lower() in ("yes", "true", "t", "1")
|
||||
|
||||
def getStartDir(self):
    """Return the directory that should hold data, log and config files."""
    # rstrip("cd") maps Config.pyc / Config.pyd back to Config.py
    this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd")

    if "--start_dir" in self.argv:
        # Explicit command line override wins
        return self.argv[self.argv.index("--start_dir") + 1]

    if this_file.endswith("/Contents/Resources/core/src/Config.py"):
        # Running as ZeroNet.app
        in_protected_dir = (
            this_file.startswith("/Application")
            or this_file.startswith("/private")
            or this_file.startswith(os.path.expanduser("~/Library"))
        )
        if in_protected_dir:
            # Running from non-writeable directory, put data to Application Support
            return os.path.expanduser("~/Library/Application Support/ZeroNet")
        # Running from writeable directory, put data next to the .app
        return re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file)

    if this_file.endswith("/core/src/Config.py"):
        # Running as exe or source is at Application Support directory,
        # put var files outside of the core dir
        return this_file.replace("/core/src/Config.py", "")

    if this_file.endswith("usr/share/zeronet/src/Config.py"):
        # Running from non-writeable location, e.g. AppImage
        return os.path.expanduser("~/ZeroNet")

    return "."
|
||||
|
||||
# Create command line arguments
|
||||
def createArguments(self):
|
||||
from Crypt import CryptHash
|
||||
access_key_default = CryptHash.random(24, "base64") # Used to allow restrited plugins when multiuser plugin is enabled
|
||||
trackers = [
|
||||
"udp://tracker.coppersurfer.tk:6969",
|
||||
"udp://tracker.leechers-paradise.org:6969",
|
||||
"udp://9.rarbg.com:2710",
|
||||
"http://tracker.aletorrenty.pl:2710/announce",
|
||||
"http://tracker.skyts.net:6969/announce",
|
||||
"http://torrent.gresille.org/announce"
|
||||
"http://open.acgnxtracker.com:80/announce", # DE
|
||||
"http://tracker.bt4g.com:2095/announce", # Cloudflare
|
||||
"http://tracker.files.fm:6969/announce",
|
||||
"http://t.publictracker.xyz:6969/announce",
|
||||
"https://tracker.lilithraws.cf:443/announce",
|
||||
"https://tracker.babico.name.tr:443/announce",
|
||||
]
|
||||
# Platform specific
|
||||
if sys.platform.startswith("win"):
|
||||
|
@ -43,13 +95,43 @@ class Config(object):
|
|||
else:
|
||||
coffeescript = None
|
||||
|
||||
try:
|
||||
language, enc = locale.getdefaultlocale()
|
||||
language = language.lower().replace("_", "-")
|
||||
if language not in ["pt-br", "zh-tw"]:
|
||||
language = language.split("-")[0]
|
||||
except Exception:
|
||||
language = "en"
|
||||
|
||||
use_openssl = True
|
||||
|
||||
if repr(1483108852.565) != "1483108852.565": # Fix for weird Android issue
|
||||
fix_float_decimals = True
|
||||
else:
|
||||
fix_float_decimals = False
|
||||
|
||||
config_file = self.start_dir + "/zeronet.conf"
|
||||
data_dir = self.start_dir + "/data"
|
||||
log_dir = self.start_dir + "/log"
|
||||
|
||||
ip_local = ["127.0.0.1", "::1"]
|
||||
|
||||
# Main
|
||||
action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
|
||||
|
||||
# SiteCreate
|
||||
action = self.subparsers.add_parser("siteCreate", help='Create a new site')
|
||||
action.register('type', 'bool', self.strToBool)
|
||||
action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True)
|
||||
|
||||
# SiteNeedFile
|
||||
action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
|
||||
action.add_argument('address', help='Site address')
|
||||
action.add_argument('inner_path', help='File inner path')
|
||||
|
||||
# SiteDownload
|
||||
action = self.subparsers.add_parser("siteDownload", help='Download a new site')
|
||||
action.add_argument('address', help='Site address')
|
||||
|
||||
# SiteSign
|
||||
action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
|
||||
|
@ -57,6 +139,7 @@ class Config(object):
|
|||
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
|
||||
action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
|
||||
default="content.json", metavar="inner_path")
|
||||
action.add_argument('--remove_missing_optional', help='Remove optional files that is not present in the directory', action='store_true')
|
||||
action.add_argument('--publish', help='Publish site after the signing', action='store_true')
|
||||
|
||||
# SitePublish
|
||||
|
@ -73,6 +156,12 @@ class Config(object):
|
|||
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
|
||||
action.add_argument('address', help='Site to verify')
|
||||
|
||||
# SiteCmd
|
||||
action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site')
|
||||
action.add_argument('address', help='Site address')
|
||||
action.add_argument('cmd', help='API command name')
|
||||
action.add_argument('parameters', help='Parameters of the command', nargs='?')
|
||||
|
||||
# dbRebuild
|
||||
action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
|
||||
action.add_argument('address', help='Site to rebuild')
|
||||
|
@ -107,56 +196,150 @@ class Config(object):
|
|||
action.add_argument('message', help='Message to sign')
|
||||
action.add_argument('privatekey', help='Private key')
|
||||
|
||||
# Crypt Verify
|
||||
action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address')
|
||||
action.add_argument('message', help='Message to verify')
|
||||
action.add_argument('sign', help='Signiture for message')
|
||||
action.add_argument('address', help='Signer\'s address')
|
||||
|
||||
# Crypt GetPrivatekey
|
||||
action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed')
|
||||
action.add_argument('master_seed', help='Source master seed')
|
||||
action.add_argument('site_address_index', help='Site address index', type=int)
|
||||
|
||||
action = self.subparsers.add_parser("getConfig", help='Return json-encoded info')
|
||||
action = self.subparsers.add_parser("testConnection", help='Testing')
|
||||
action = self.subparsers.add_parser("testAnnounce", help='Testing')
|
||||
|
||||
self.test_parser = self.subparsers.add_parser("test", help='Run a test')
|
||||
self.test_parser.add_argument('test_name', help='Test name', nargs="?")
|
||||
# self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true')
|
||||
|
||||
# Config parameters
|
||||
self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
|
||||
self.parser.add_argument('--debug', help='Debug mode', action='store_true')
|
||||
self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true')
|
||||
self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
|
||||
self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true')
|
||||
|
||||
self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
|
||||
|
||||
self.parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path")
|
||||
self.parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path")
|
||||
self.parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path")
|
||||
self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path")
|
||||
self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
|
||||
self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
|
||||
|
||||
self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"])
|
||||
|
||||
self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
|
||||
self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"])
|
||||
self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"])
|
||||
self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int)
|
||||
|
||||
self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
|
||||
self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
|
||||
self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
|
||||
self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
|
||||
self.parser.add_argument('--ui_host', help='Allow access using this hosts', metavar='host', nargs='*')
|
||||
self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true')
|
||||
|
||||
self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
|
||||
nargs='?', const="default_browser", metavar='browser_name')
|
||||
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr',
|
||||
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d',
|
||||
metavar='address')
|
||||
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
|
||||
self.parser.add_argument('--updatesite', help='Source code update site', default='1Update8crprmciJHwp2WXqkx2c4iYp18',
|
||||
metavar='address')
|
||||
self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key')
|
||||
self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')
|
||||
|
||||
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit')
|
||||
self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
|
||||
self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
|
||||
self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
|
||||
self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
|
||||
|
||||
self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
|
||||
self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
|
||||
self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port')
|
||||
self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port')
|
||||
self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"])
|
||||
self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*')
|
||||
self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
|
||||
self.parser.add_argument('--offline', help='Disable network communication', action='store_true')
|
||||
|
||||
self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
|
||||
self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
|
||||
self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip')
|
||||
self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
|
||||
self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
|
||||
self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path')
|
||||
self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup',
|
||||
type='bool', choices=[True, False], default=use_openssl)
|
||||
self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*')
|
||||
self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
|
||||
self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 liblary for speedup', type='bool', choices=[True, False], default=True)
|
||||
self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=True)
|
||||
self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path")
|
||||
self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path")
|
||||
self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
|
||||
self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
|
||||
self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true')
|
||||
self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
|
||||
type='bool', choices=[True, False], default=True)
|
||||
self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
|
||||
self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup',
|
||||
default=2048, type=int, metavar='limit')
|
||||
self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
|
||||
self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)',
|
||||
type='bool', choices=[True, False], default=False)
|
||||
self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
|
||||
type='bool', choices=[True, False], default=False)
|
||||
self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
|
||||
type='bool', choices=[True, False], default=True)
|
||||
type='bool', choices=[True, False], default=False)
|
||||
self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
|
||||
type='bool', choices=[True, False], default=fix_float_decimals)
|
||||
self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
|
||||
|
||||
self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int)
|
||||
self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int)
|
||||
self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int)
|
||||
self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int)
|
||||
|
||||
self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual")
|
||||
|
||||
self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
|
||||
metavar='executable_path')
|
||||
|
||||
self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
|
||||
self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
|
||||
self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
|
||||
self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
|
||||
self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true')
|
||||
self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10)
|
||||
self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='limit', type=int, default=15441)
|
||||
|
||||
self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
|
||||
self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true')
|
||||
|
||||
return self.parser
|
||||
|
||||
def loadTrackersFile(self):
|
||||
self.trackers = []
|
||||
for tracker in open(self.trackers_file):
|
||||
if "://" in tracker:
|
||||
self.trackers.append(tracker.strip())
|
||||
if not self.trackers_file:
|
||||
self.trackers_file = ["trackers.txt", "{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"]
|
||||
self.trackers = self.arguments.trackers[:]
|
||||
|
||||
for trackers_file in self.trackers_file:
|
||||
try:
|
||||
if trackers_file.startswith("/"): # Absolute
|
||||
trackers_file_path = trackers_file
|
||||
elif trackers_file.startswith("{data_dir}"): # Relative to data_dir
|
||||
trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir)
|
||||
else: # Relative to zeronet.py
|
||||
trackers_file_path = self.start_dir + "/" + trackers_file
|
||||
|
||||
if not os.path.exists(trackers_file_path):
|
||||
continue
|
||||
|
||||
for line in open(trackers_file_path):
|
||||
tracker = line.strip()
|
||||
if "://" in tracker and tracker not in self.trackers:
|
||||
self.trackers.append(tracker)
|
||||
except Exception as err:
|
||||
print("Error loading trackers file: %s" % err)
|
||||
|
||||
# Find arguments specified for current action
|
||||
def getActionArguments(self):
|
||||
|
@ -168,7 +351,7 @@ class Config(object):
|
|||
|
||||
# Try to find action from argv
|
||||
def getAction(self, argv):
|
||||
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
|
||||
actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions
|
||||
found_action = False
|
||||
for action in actions: # See if any in argv
|
||||
if action in argv:
|
||||
|
@ -197,8 +380,17 @@ class Config(object):
|
|||
valid_parameters.append(arg)
|
||||
return valid_parameters + plugin_parameters
|
||||
|
||||
# Return the parser responsible for *argv*:
# the matched action's subparser, or the main parser when no action is found.
def getParser(self, argv):
    action = self.getAction(argv)
    return self.subparsers.choices[action] if action else self.parser
|
||||
|
||||
# Parse arguments from config file and command line
|
||||
def parse(self, silent=False, parse_config=True):
|
||||
argv = self.argv[:] # Copy command line arguments
|
||||
current_parser = self.getParser(argv)
|
||||
if silent: # Don't display messages or quit on unknown parameter
|
||||
original_print_message = self.parser._print_message
|
||||
original_exit = self.parser.exit
|
||||
|
@ -206,27 +398,36 @@ class Config(object):
|
|||
def silencer(parser, function_name):
|
||||
parser.exited = True
|
||||
return None
|
||||
self.parser.exited = False
|
||||
self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message")
|
||||
self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit")
|
||||
current_parser.exited = False
|
||||
current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message")
|
||||
current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit")
|
||||
|
||||
argv = self.argv[:] # Copy command line arguments
|
||||
self.parseCommandline(argv, silent) # Parse argv
|
||||
self.setAttributes()
|
||||
if parse_config:
|
||||
argv = self.parseConfig(argv) # Add arguments from config file
|
||||
|
||||
self.parseCommandline(argv, silent) # Parse argv
|
||||
self.setAttributes()
|
||||
|
||||
if not silent:
|
||||
if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
|
||||
self.ip_local.append(self.fileserver_ip)
|
||||
|
||||
if silent: # Restore original functions
|
||||
if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
|
||||
if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
|
||||
self.action = None
|
||||
self.parser._print_message = original_print_message
|
||||
self.parser.exit = original_exit
|
||||
current_parser._print_message = original_print_message
|
||||
current_parser.exit = original_exit
|
||||
|
||||
self.loadTrackersFile()
|
||||
|
||||
# Parse command line arguments
|
||||
def parseCommandline(self, argv, silent=False):
|
||||
# Find out if action is specificed on start
|
||||
action = self.getAction(argv)
|
||||
if not action:
|
||||
argv.append("--end")
|
||||
argv.append("main")
|
||||
action = "main"
|
||||
argv = self.moveUnknownToEnd(argv, action)
|
||||
|
@ -242,29 +443,55 @@ class Config(object):
|
|||
# Parse config file
|
||||
def parseConfig(self, argv):
|
||||
# Find config file path from parameters
|
||||
config_file = "zeronet.conf"
|
||||
if "--config_file" in argv:
|
||||
config_file = argv[argv.index("--config_file") + 1]
|
||||
self.config_file = argv[argv.index("--config_file") + 1]
|
||||
# Load config file
|
||||
if os.path.isfile(config_file):
|
||||
config = ConfigParser.ConfigParser(allow_no_value=True)
|
||||
config.read(config_file)
|
||||
if os.path.isfile(self.config_file):
|
||||
config = configparser.RawConfigParser(allow_no_value=True, strict=False)
|
||||
config.read(self.config_file)
|
||||
for section in config.sections():
|
||||
for key, val in config.items(section):
|
||||
if val == "True":
|
||||
val = None
|
||||
if section != "global": # If not global prefix key with section
|
||||
key = section + "_" + key
|
||||
|
||||
if key == "open_browser": # Prefer config file value over cli argument
|
||||
while "--%s" % key in argv:
|
||||
pos = argv.index("--open_browser")
|
||||
del argv[pos:pos + 2]
|
||||
|
||||
argv_extend = ["--%s" % key]
|
||||
if val:
|
||||
for line in val.strip().split("\n"): # Allow multi-line values
|
||||
argv.insert(1, line)
|
||||
argv.insert(1, "--%s" % key)
|
||||
argv_extend.append(line)
|
||||
if "\n" in val:
|
||||
argv_extend.append("--end")
|
||||
|
||||
argv = argv[:1] + argv_extend + argv[1:]
|
||||
return argv
|
||||
|
||||
# Return command line value of given argument
|
||||
# Return command line value of given argument, or None if absent
def getCmdlineValue(self, key):
    try:
        pos = self.argv.index(key)
    except ValueError:  # Key not present on the command line
        return None
    if pos + 1 >= len(self.argv):  # Last arg, value not specified
        return None
    return self.argv[pos + 1]
|
||||
|
||||
# Expose arguments as class attributes
|
||||
# Expose parsed command line arguments as attributes on this instance
def setAttributes(self):
    if not self.arguments:
        return
    path_keys = ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file")
    for key, val in vars(self.arguments).items():
        if type(val) is list:
            # Copy, so later mutation doesn't touch the parsed namespace
            val = list(val)
        if key in path_keys and val:
            # Normalize Windows path separators
            val = val.replace("\\", "/")
        setattr(self, key, val)
|
||||
|
||||
def loadPlugins(self):
|
||||
|
@ -273,7 +500,11 @@ class Config(object):
|
|||
@PluginManager.acceptPlugins
|
||||
class ConfigPlugin(object):
|
||||
def __init__(self, config):
|
||||
self.argv = config.argv
|
||||
self.parser = config.parser
|
||||
self.subparsers = config.subparsers
|
||||
self.test_parser = config.test_parser
|
||||
self.getCmdlineValue = config.getCmdlineValue
|
||||
self.createArguments()
|
||||
|
||||
def createArguments(self):
|
||||
|
@ -281,5 +512,164 @@ class Config(object):
|
|||
|
||||
ConfigPlugin(self)
|
||||
|
||||
def saveValue(self, key, value):
    """Persist a single config value into the [global] section of the config file.

    key: config key name
    value: new value; a list is written as a multi-line entry,
           None deletes the key from the file.
    """
    if not os.path.isfile(self.config_file):
        content = ""
    else:
        content = open(self.config_file).read()
    lines = content.splitlines()

    # Locate the [global] section header and the existing line for this key
    global_line_i = None
    key_line_i = None
    for i, line in enumerate(lines):
        if line.strip() == "[global]":
            global_line_i = i
        if line.startswith(key + " =") or line == key:
            key_line_i = i

    # Delete previous multiline values (continuation lines are indented).
    # NOTE: compare against None — a key on line 0 is a valid match;
    # bounding the loop prevents an IndexError when values run to EOF.
    if key_line_i is not None:
        while len(lines) > key_line_i + 1:
            is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t")
            if not is_value_line:
                break
            del lines[key_line_i + 1]

    if value is None:  # Delete line
        if key_line_i is not None:
            del lines[key_line_i]

    else:  # Add / update
        if type(value) is list:
            # Leading empty entry puts every list item on its own indented line
            value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value]
        else:
            value_lines = [str(value).replace("\n", "").replace("\r", "")]
        new_line = "%s = %s" % (key, "\n ".join(value_lines))
        if key_line_i is not None:  # Already in the config, change the line
            lines[key_line_i] = new_line
        elif global_line_i is None:  # No global section yet, append to end of file
            lines.append("[global]")
            lines.append(new_line)
        else:  # Has global section, append the line after it
            lines.insert(global_line_i + 1, new_line)

    open(self.config_file, "w").write("\n".join(lines))
|
||||
|
||||
def getServerInfo(self):
|
||||
from Plugin import PluginManager
|
||||
import main
|
||||
|
||||
info = {
|
||||
"platform": sys.platform,
|
||||
"fileserver_ip": self.fileserver_ip,
|
||||
"fileserver_port": self.fileserver_port,
|
||||
"ui_ip": self.ui_ip,
|
||||
"ui_port": self.ui_port,
|
||||
"version": self.version,
|
||||
"rev": self.rev,
|
||||
"language": self.language,
|
||||
"debug": self.debug,
|
||||
"plugins": PluginManager.plugin_manager.plugin_names,
|
||||
|
||||
"log_dir": os.path.abspath(self.log_dir),
|
||||
"data_dir": os.path.abspath(self.data_dir),
|
||||
"src_dir": os.path.dirname(os.path.abspath(__file__))
|
||||
}
|
||||
|
||||
try:
|
||||
info["ip_external"] = main.file_server.port_opened
|
||||
info["tor_enabled"] = main.file_server.tor_manager.enabled
|
||||
info["tor_status"] = main.file_server.tor_manager.status
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return info
|
||||
|
||||
def initConsoleLogger(self):
|
||||
if self.action == "main":
|
||||
format = '[%(asctime)s] %(name)s %(message)s'
|
||||
else:
|
||||
format = '%(name)s %(message)s'
|
||||
|
||||
if self.console_log_level == "default":
|
||||
if self.silent:
|
||||
level = logging.ERROR
|
||||
elif self.debug:
|
||||
level = logging.DEBUG
|
||||
else:
|
||||
level = logging.INFO
|
||||
else:
|
||||
level = logging.getLevelName(self.console_log_level)
|
||||
|
||||
console_logger = logging.StreamHandler()
|
||||
console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S"))
|
||||
console_logger.setLevel(level)
|
||||
logging.getLogger('').addHandler(console_logger)
|
||||
|
||||
def initFileLogger(self):
|
||||
if self.action == "main":
|
||||
log_file_path = "%s/debug.log" % self.log_dir
|
||||
else:
|
||||
log_file_path = "%s/cmd.log" % self.log_dir
|
||||
|
||||
if self.log_rotate == "off":
|
||||
file_logger = logging.FileHandler(log_file_path, "w", "utf-8")
|
||||
else:
|
||||
when_names = {"weekly": "w", "daily": "d", "hourly": "h"}
|
||||
file_logger = logging.handlers.TimedRotatingFileHandler(
|
||||
log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count,
|
||||
encoding="utf8"
|
||||
)
|
||||
|
||||
if os.path.isfile(log_file_path):
|
||||
file_logger.doRollover() # Always start with empty log file
|
||||
file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s'))
|
||||
file_logger.setLevel(logging.getLevelName(self.log_level))
|
||||
logging.getLogger('').setLevel(logging.getLevelName(self.log_level))
|
||||
logging.getLogger('').addHandler(file_logger)
|
||||
|
||||
def initLogging(self, console_logging=None, file_logging=None):
|
||||
if console_logging == None:
|
||||
console_logging = self.console_log_level != "off"
|
||||
|
||||
if file_logging == None:
|
||||
file_logging = self.log_level != "off"
|
||||
|
||||
# Create necessary files and dirs
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.mkdir(self.log_dir)
|
||||
try:
|
||||
os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
except Exception as err:
|
||||
print("Can't change permission of %s: %s" % (self.log_dir, err))
|
||||
|
||||
# Make warning hidden from console
|
||||
logging.WARNING = 15 # Don't display warnings if not in debug mode
|
||||
logging.addLevelName(15, "WARNING")
|
||||
|
||||
logging.getLogger('').name = "-" # Remove root prefix
|
||||
|
||||
self.error_logger = ErrorLogHandler()
|
||||
self.error_logger.setLevel(logging.getLevelName("ERROR"))
|
||||
logging.getLogger('').addHandler(self.error_logger)
|
||||
|
||||
if console_logging:
|
||||
self.initConsoleLogger()
|
||||
if file_logging:
|
||||
self.initFileLogger()
|
||||
|
||||
|
||||
class ErrorLogHandler(logging.StreamHandler):
|
||||
def __init__(self):
|
||||
self.lines = []
|
||||
return super().__init__()
|
||||
|
||||
def emit(self, record):
|
||||
self.lines.append([time.time(), record.levelname, self.format(record)])
|
||||
|
||||
def onNewRecord(self, record):
|
||||
pass
|
||||
|
||||
|
||||
config = Config(sys.argv)
|
||||
|
|
|
@ -2,33 +2,49 @@ import socket
|
|||
import time
|
||||
|
||||
import gevent
|
||||
import msgpack
|
||||
try:
|
||||
from gevent.coros import RLock
|
||||
except:
|
||||
from gevent.lock import RLock
|
||||
|
||||
from Config import config
|
||||
from Debug import Debug
|
||||
from util import StreamingMsgpack
|
||||
from util import Msgpack
|
||||
from Crypt import CryptConnection
|
||||
from util import helper
|
||||
|
||||
|
||||
class Connection(object):
|
||||
__slots__ = (
|
||||
"sock", "sock_wrapped", "ip", "port", "id", "protocol", "type", "server", "unpacker", "req_id",
|
||||
"handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time",
|
||||
"last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent",
|
||||
"last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests", "waiting_streams"
|
||||
"sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "unpacker_bytes", "req_id", "ip_type",
|
||||
"handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection",
|
||||
"last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock",
|
||||
"last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "waiting_requests", "waiting_streams"
|
||||
)
|
||||
|
||||
def __init__(self, server, ip, port, sock=None):
|
||||
def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False):
|
||||
self.sock = sock
|
||||
self.ip = ip
|
||||
self.port = port
|
||||
self.cert_pin = None
|
||||
if "#" in ip:
|
||||
ip, self.cert_pin = ip.split("#")
|
||||
self.target_onion = target_onion # Requested onion adress
|
||||
self.id = server.last_connection_id
|
||||
server.last_connection_id += 1
|
||||
self.protocol = "?"
|
||||
self.type = "?"
|
||||
self.ip_type = "?"
|
||||
self.port = int(port)
|
||||
self.setIp(ip)
|
||||
|
||||
if helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
|
||||
self.is_private_ip = True
|
||||
else:
|
||||
self.is_private_ip = False
|
||||
self.is_tracker_connection = is_tracker_connection
|
||||
|
||||
self.server = server
|
||||
self.unpacker = None # Stream incoming socket messages here
|
||||
self.unpacker_bytes = 0 # How many bytes the unpacker received
|
||||
self.req_id = 0 # Last request id
|
||||
self.handshake = {} # Handshake info got from peer
|
||||
self.crypt = None # Connection encryption method
|
||||
|
@ -40,6 +56,7 @@ class Connection(object):
|
|||
|
||||
# Stats
|
||||
self.start_time = time.time()
|
||||
self.handshake_time = 0
|
||||
self.last_recv_time = 0
|
||||
self.last_message_time = 0
|
||||
self.last_send_time = 0
|
||||
|
@ -49,7 +66,12 @@ class Connection(object):
|
|||
self.bytes_sent = 0
|
||||
self.last_ping_delay = None
|
||||
self.last_req_time = 0
|
||||
self.last_cmd = None
|
||||
self.last_cmd_sent = None
|
||||
self.last_cmd_recv = None
|
||||
self.bad_actions = 0
|
||||
self.sites = 0
|
||||
self.cpu_time = 0.0
|
||||
self.send_lock = RLock()
|
||||
|
||||
self.name = None
|
||||
self.updateName()
|
||||
|
@ -57,6 +79,18 @@ class Connection(object):
|
|||
self.waiting_requests = {} # Waiting sent requests
|
||||
self.waiting_streams = {} # Waiting response file streams
|
||||
|
||||
def setIp(self, ip):
|
||||
self.ip = ip
|
||||
self.ip_type = helper.getIpType(ip)
|
||||
self.updateName()
|
||||
|
||||
def createSocket(self):
|
||||
if helper.getIpType(self.ip) == "ipv6" and not hasattr(socket, "socket_noproxy"):
|
||||
# Create IPv6 connection as IPv4 when using proxy
|
||||
return socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
|
||||
else:
|
||||
return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
|
||||
def updateName(self):
|
||||
self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
|
||||
|
||||
|
@ -69,39 +103,108 @@ class Connection(object):
|
|||
def log(self, text):
|
||||
self.server.log.debug("%s > %s" % (self.name, text))
|
||||
|
||||
def getValidSites(self):
|
||||
return [key for key, val in self.server.tor_manager.site_onions.items() if val == self.target_onion]
|
||||
|
||||
def badAction(self, weight=1):
|
||||
self.bad_actions += weight
|
||||
if self.bad_actions > 40:
|
||||
self.close("Too many bad actions")
|
||||
elif self.bad_actions > 20:
|
||||
time.sleep(5)
|
||||
|
||||
def goodAction(self):
|
||||
self.bad_actions = 0
|
||||
|
||||
# Open connection to peer and wait for handshake
|
||||
def connect(self):
|
||||
self.log("Connecting...")
|
||||
self.type = "out"
|
||||
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.sock.connect((self.ip, int(self.port)))
|
||||
if self.ip_type == "onion":
|
||||
if not self.server.tor_manager or not self.server.tor_manager.enabled:
|
||||
raise Exception("Can't connect to onion addresses, no Tor controller present")
|
||||
self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
|
||||
elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
|
||||
raise Exception("Can't connect to local IPs in Tor: always mode")
|
||||
elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection:
|
||||
if config.trackers_proxy == "tor":
|
||||
self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
|
||||
else:
|
||||
import socks
|
||||
self.sock = socks.socksocket()
|
||||
proxy_ip, proxy_port = config.trackers_proxy.split(":")
|
||||
self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
|
||||
else:
|
||||
self.sock = self.createSocket()
|
||||
|
||||
# Implicit SSL in the future
|
||||
# self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
|
||||
# self.sock.do_handshake()
|
||||
# self.crypt = "tls-rsa"
|
||||
# self.sock_wrapped = True
|
||||
if "TCP_NODELAY" in dir(socket):
|
||||
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
|
||||
timeout_before = self.sock.gettimeout()
|
||||
self.sock.settimeout(30)
|
||||
if self.ip_type == "ipv6" and not hasattr(self.sock, "proxy"):
|
||||
sock_address = (self.ip, self.port, 1, 1)
|
||||
else:
|
||||
sock_address = (self.ip, self.port)
|
||||
|
||||
self.sock.connect(sock_address)
|
||||
|
||||
# Implicit SSL
|
||||
should_encrypt = not self.ip_type == "onion" and self.ip not in self.server.broken_ssl_ips and self.ip not in config.ip_local
|
||||
if self.cert_pin:
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", cert_pin=self.cert_pin)
|
||||
self.sock.do_handshake()
|
||||
self.crypt = "tls-rsa"
|
||||
self.sock_wrapped = True
|
||||
elif should_encrypt and "tls-rsa" in CryptConnection.manager.crypt_supported:
|
||||
try:
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
|
||||
self.sock.do_handshake()
|
||||
self.crypt = "tls-rsa"
|
||||
self.sock_wrapped = True
|
||||
except Exception as err:
|
||||
if not config.force_encryption:
|
||||
self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
|
||||
self.server.broken_ssl_ips[self.ip] = True
|
||||
self.sock.close()
|
||||
self.crypt = None
|
||||
self.sock = self.createSocket()
|
||||
self.sock.settimeout(30)
|
||||
self.sock.connect(sock_address)
|
||||
|
||||
# Detect protocol
|
||||
self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
|
||||
self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()})
|
||||
event_connected = self.event_connected
|
||||
gevent.spawn(self.messageLoop)
|
||||
return event_connected.get() # Wait for handshake
|
||||
connect_res = event_connected.get() # Wait for handshake
|
||||
self.sock.settimeout(timeout_before)
|
||||
return connect_res
|
||||
|
||||
# Handle incoming connection
|
||||
def handleIncomingConnection(self, sock):
|
||||
self.log("Incoming connection...")
|
||||
|
||||
if "TCP_NODELAY" in dir(socket):
|
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
|
||||
self.type = "in"
|
||||
try:
|
||||
if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16":
|
||||
self.log("Crypt in connection using implicit SSL")
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
|
||||
self.sock_wrapped = True
|
||||
self.crypt = "tls-rsa"
|
||||
except Exception, err:
|
||||
self.log("Socket peek error: %s" % Debug.formatException(err))
|
||||
if self.ip not in config.ip_local: # Clearnet: Check implicit SSL
|
||||
try:
|
||||
first_byte = sock.recv(1, gevent.socket.MSG_PEEK)
|
||||
if first_byte == b"\x16":
|
||||
self.log("Crypt in connection using implicit SSL")
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
|
||||
self.sock_wrapped = True
|
||||
self.crypt = "tls-rsa"
|
||||
except Exception as err:
|
||||
self.log("Socket peek error: %s" % Debug.formatException(err))
|
||||
self.messageLoop()
|
||||
|
||||
def getMsgpackUnpacker(self):
|
||||
if self.handshake and self.handshake.get("use_bin_type"):
|
||||
return Msgpack.getUnpacker(fallback=True, decode=False)
|
||||
else: # Backward compatibility for <0.7.0
|
||||
return Msgpack.getUnpacker(fallback=True, decode=True)
|
||||
|
||||
# Message loop for connection
|
||||
def messageLoop(self):
|
||||
if not self.sock:
|
||||
|
@ -110,78 +213,243 @@ class Connection(object):
|
|||
self.protocol = "v2"
|
||||
self.updateName()
|
||||
self.connected = True
|
||||
buff_len = 0
|
||||
req_len = 0
|
||||
self.unpacker_bytes = 0
|
||||
|
||||
self.unpacker = msgpack.Unpacker()
|
||||
try:
|
||||
while True:
|
||||
buff = self.sock.recv(16 * 1024)
|
||||
while not self.closed:
|
||||
buff = self.sock.recv(64 * 1024)
|
||||
if not buff:
|
||||
break # Connection closed
|
||||
buff_len = len(buff)
|
||||
|
||||
# Statistics
|
||||
self.last_recv_time = time.time()
|
||||
self.incomplete_buff_recv += 1
|
||||
self.bytes_recv += len(buff)
|
||||
self.server.bytes_recv += len(buff)
|
||||
self.bytes_recv += buff_len
|
||||
self.server.bytes_recv += buff_len
|
||||
req_len += buff_len
|
||||
|
||||
if not self.unpacker:
|
||||
self.unpacker = msgpack.Unpacker()
|
||||
self.unpacker = self.getMsgpackUnpacker()
|
||||
self.unpacker_bytes = 0
|
||||
|
||||
self.unpacker.feed(buff)
|
||||
buff = None
|
||||
for message in self.unpacker:
|
||||
self.unpacker_bytes += buff_len
|
||||
|
||||
while True:
|
||||
try:
|
||||
message = next(self.unpacker)
|
||||
except StopIteration:
|
||||
break
|
||||
if not type(message) is dict:
|
||||
if config.debug_socket:
|
||||
self.log("Invalid message type: %s, content: %r, buffer: %r" % (type(message), message, buff[0:16]))
|
||||
raise Exception("Invalid message type: %s" % type(message))
|
||||
|
||||
# Stats
|
||||
self.incomplete_buff_recv = 0
|
||||
stat_key = message.get("cmd", "unknown")
|
||||
if stat_key == "response" and "to" in message:
|
||||
cmd_sent = self.waiting_requests.get(message["to"], {"cmd": "unknown"})["cmd"]
|
||||
stat_key = "response: %s" % cmd_sent
|
||||
if stat_key == "update":
|
||||
stat_key = "update: %s" % message["params"]["site"]
|
||||
self.server.stat_recv[stat_key]["bytes"] += req_len
|
||||
self.server.stat_recv[stat_key]["num"] += 1
|
||||
if "stream_bytes" in message:
|
||||
self.handleStream(message)
|
||||
self.server.stat_recv[stat_key]["bytes"] += message["stream_bytes"]
|
||||
req_len = 0
|
||||
|
||||
# Handle message
|
||||
if "stream_bytes" in message:
|
||||
buff_left = self.handleStream(message, buff)
|
||||
self.unpacker = self.getMsgpackUnpacker()
|
||||
self.unpacker.feed(buff_left)
|
||||
self.unpacker_bytes = len(buff_left)
|
||||
if config.debug_socket:
|
||||
self.log("Start new unpacker with buff_left: %r" % buff_left)
|
||||
else:
|
||||
self.handleMessage(message)
|
||||
|
||||
message = None
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
if not self.closed:
|
||||
self.log("Socket error: %s" % Debug.formatException(err))
|
||||
self.close() # MessageLoop ended, close connection
|
||||
self.server.stat_recv["error: %s" % err]["bytes"] += req_len
|
||||
self.server.stat_recv["error: %s" % err]["num"] += 1
|
||||
self.close("MessageLoop ended (closed: %s)" % self.closed) # MessageLoop ended, close connection
|
||||
|
||||
def getUnpackerUnprocessedBytesNum(self):
|
||||
if "tell" in dir(self.unpacker):
|
||||
bytes_num = self.unpacker_bytes - self.unpacker.tell()
|
||||
else:
|
||||
bytes_num = self.unpacker._fb_buf_n - self.unpacker._fb_buf_o
|
||||
return bytes_num
|
||||
|
||||
# Stream socket directly to a file
|
||||
def handleStream(self, message, buff):
|
||||
stream_bytes_left = message["stream_bytes"]
|
||||
file = self.waiting_streams[message["to"]]
|
||||
|
||||
unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
|
||||
|
||||
if unprocessed_bytes_num: # Found stream bytes in unpacker
|
||||
unpacker_stream_bytes = min(unprocessed_bytes_num, stream_bytes_left)
|
||||
buff_stream_start = len(buff) - unprocessed_bytes_num
|
||||
file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes])
|
||||
stream_bytes_left -= unpacker_stream_bytes
|
||||
else:
|
||||
unpacker_stream_bytes = 0
|
||||
|
||||
if config.debug_socket:
|
||||
self.log(
|
||||
"Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" %
|
||||
(message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unprocessed_bytes_num)
|
||||
)
|
||||
|
||||
try:
|
||||
while 1:
|
||||
if stream_bytes_left <= 0:
|
||||
break
|
||||
stream_buff = self.sock.recv(min(64 * 1024, stream_bytes_left))
|
||||
if not stream_buff:
|
||||
break
|
||||
buff_len = len(stream_buff)
|
||||
stream_bytes_left -= buff_len
|
||||
file.write(stream_buff)
|
||||
|
||||
# Statistics
|
||||
self.last_recv_time = time.time()
|
||||
self.incomplete_buff_recv += 1
|
||||
self.bytes_recv += buff_len
|
||||
self.server.bytes_recv += buff_len
|
||||
except Exception as err:
|
||||
self.log("Stream read error: %s" % Debug.formatException(err))
|
||||
|
||||
if config.debug_socket:
|
||||
self.log("End stream %s, file pos: %s" % (message["to"], file.tell()))
|
||||
|
||||
self.incomplete_buff_recv = 0
|
||||
self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event
|
||||
del self.waiting_streams[message["to"]]
|
||||
del self.waiting_requests[message["to"]]
|
||||
|
||||
if unpacker_stream_bytes:
|
||||
return buff[buff_stream_start + unpacker_stream_bytes:]
|
||||
else:
|
||||
return b""
|
||||
|
||||
# My handshake info
|
||||
def handshakeInfo(self):
|
||||
return {
|
||||
def getHandshakeInfo(self):
|
||||
# No TLS for onion connections
|
||||
if self.ip_type == "onion":
|
||||
crypt_supported = []
|
||||
elif self.ip in self.server.broken_ssl_ips:
|
||||
crypt_supported = []
|
||||
else:
|
||||
crypt_supported = CryptConnection.manager.crypt_supported
|
||||
# No peer id for onion connections
|
||||
if self.ip_type == "onion" or self.ip in config.ip_local:
|
||||
peer_id = ""
|
||||
else:
|
||||
peer_id = self.server.peer_id
|
||||
# Setup peer lock from requested onion address
|
||||
if self.handshake and self.handshake.get("target_ip", "").endswith(".onion") and self.server.tor_manager.start_onions:
|
||||
self.target_onion = self.handshake.get("target_ip").replace(".onion", "") # My onion address
|
||||
if not self.server.tor_manager.site_onions.values():
|
||||
self.server.log.warning("Unknown target onion address: %s" % self.target_onion)
|
||||
|
||||
handshake = {
|
||||
"version": config.version,
|
||||
"protocol": "v2",
|
||||
"peer_id": self.server.peer_id,
|
||||
"use_bin_type": True,
|
||||
"peer_id": peer_id,
|
||||
"fileserver_port": self.server.port,
|
||||
"port_opened": self.server.port_opened,
|
||||
"port_opened": self.server.port_opened.get(self.ip_type, None),
|
||||
"target_ip": self.ip,
|
||||
"rev": config.rev,
|
||||
"crypt_supported": CryptConnection.manager.crypt_supported,
|
||||
"crypt": self.crypt
|
||||
"crypt_supported": crypt_supported,
|
||||
"crypt": self.crypt,
|
||||
"time": int(time.time())
|
||||
}
|
||||
if self.target_onion:
|
||||
handshake["onion"] = self.target_onion
|
||||
elif self.ip_type == "onion":
|
||||
handshake["onion"] = self.server.tor_manager.getOnion("global")
|
||||
|
||||
if self.is_tracker_connection:
|
||||
handshake["tracker_connection"] = True
|
||||
|
||||
if config.debug_socket:
|
||||
self.log("My Handshake: %s" % handshake)
|
||||
|
||||
return handshake
|
||||
|
||||
def setHandshake(self, handshake):
|
||||
if config.debug_socket:
|
||||
self.log("Remote Handshake: %s" % handshake)
|
||||
|
||||
if handshake.get("peer_id") == self.server.peer_id and not handshake.get("tracker_connection") and not self.is_tracker_connection:
|
||||
self.close("Same peer id, can't connect to myself")
|
||||
self.server.peer_blacklist.append((handshake["target_ip"], handshake["fileserver_port"]))
|
||||
return False
|
||||
|
||||
self.handshake = handshake
|
||||
if handshake.get("port_opened", None) is False: # Not connectable
|
||||
if handshake.get("port_opened", None) is False and "onion" not in handshake and not self.is_private_ip: # Not connectable
|
||||
self.port = 0
|
||||
else:
|
||||
self.port = handshake["fileserver_port"] # Set peer fileserver port
|
||||
self.port = int(handshake["fileserver_port"]) # Set peer fileserver port
|
||||
|
||||
if handshake.get("use_bin_type") and self.unpacker:
|
||||
unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
|
||||
self.log("Changing unpacker to bin type (unprocessed bytes: %s)" % unprocessed_bytes_num)
|
||||
unprocessed_bytes = self.unpacker.read_bytes(unprocessed_bytes_num)
|
||||
self.unpacker = self.getMsgpackUnpacker() # Create new unpacker for different msgpack type
|
||||
self.unpacker_bytes = 0
|
||||
if unprocessed_bytes:
|
||||
self.unpacker.feed(unprocessed_bytes)
|
||||
|
||||
# Check if we can encrypt the connection
|
||||
if handshake.get("crypt_supported") and handshake["peer_id"] not in self.server.broken_ssl_peer_ids:
|
||||
if handshake.get("crypt"): # Recommended crypt by server
|
||||
if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips:
|
||||
if type(handshake["crypt_supported"][0]) is bytes:
|
||||
handshake["crypt_supported"] = [item.decode() for item in handshake["crypt_supported"]] # Backward compatibility
|
||||
|
||||
if self.ip_type == "onion" or self.ip in config.ip_local:
|
||||
crypt = None
|
||||
elif handshake.get("crypt"): # Recommended crypt by server
|
||||
crypt = handshake["crypt"]
|
||||
else: # Select the best supported on both sides
|
||||
crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])
|
||||
|
||||
if crypt:
|
||||
self.crypt = crypt
|
||||
|
||||
if self.type == "in" and handshake.get("onion") and not self.ip_type == "onion": # Set incoming connection's onion address
|
||||
if self.server.ips.get(self.ip) == self:
|
||||
del self.server.ips[self.ip]
|
||||
self.setIp(handshake["onion"] + ".onion")
|
||||
self.log("Changing ip to %s" % self.ip)
|
||||
self.server.ips[self.ip] = self
|
||||
self.updateName()
|
||||
|
||||
self.event_connected.set(True) # Mark handshake as done
|
||||
self.event_connected = None
|
||||
self.handshake_time = time.time()
|
||||
|
||||
# Handle incoming message
|
||||
def handleMessage(self, message):
|
||||
cmd = message["cmd"]
|
||||
|
||||
self.last_message_time = time.time()
|
||||
if message.get("cmd") == "response": # New style response
|
||||
self.last_cmd_recv = cmd
|
||||
if cmd == "response": # New style response
|
||||
if message["to"] in self.waiting_requests:
|
||||
if self.last_send_time:
|
||||
if self.last_send_time and len(self.waiting_requests) == 1:
|
||||
ping = time.time() - self.last_send_time
|
||||
self.last_ping_delay = ping
|
||||
self.waiting_requests[message["to"]].set(message) # Set the response to event
|
||||
self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event
|
||||
del self.waiting_requests[message["to"]]
|
||||
elif message["to"] == 0: # Other peers handshake
|
||||
ping = time.time() - self.start_time
|
||||
|
@ -192,140 +460,128 @@ class Connection(object):
|
|||
if message.get("crypt") and not self.sock_wrapped:
|
||||
self.crypt = message["crypt"]
|
||||
server = (self.type == "in")
|
||||
self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server))
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
|
||||
self.log("Crypt out connection using: %s (server side: %s, ping: %.3fs)..." % (self.crypt, server, ping))
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
|
||||
self.sock.do_handshake()
|
||||
self.sock_wrapped = True
|
||||
|
||||
if not self.sock_wrapped and self.cert_pin:
|
||||
self.close("Crypt connection error: Socket not encrypted, but certificate pin present")
|
||||
return
|
||||
|
||||
self.setHandshake(message)
|
||||
else:
|
||||
self.log("Unknown response: %s" % message)
|
||||
elif message.get("cmd"): # Handhsake request
|
||||
if message["cmd"] == "handshake":
|
||||
if config.debug_socket:
|
||||
self.log("Handshake request: %s" % message)
|
||||
self.setHandshake(message["params"])
|
||||
data = self.handshakeInfo()
|
||||
data["cmd"] = "response"
|
||||
data["to"] = message["req_id"]
|
||||
self.send(data) # Send response to handshake
|
||||
# Sent crypt request to client
|
||||
if self.crypt and not self.sock_wrapped:
|
||||
server = (self.type == "in")
|
||||
self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
|
||||
try:
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
|
||||
self.sock_wrapped = True
|
||||
except Exception, err:
|
||||
self.log("Crypt connection error: %s, adding peerid %s as broken ssl." % (err, message["params"]["peer_id"]))
|
||||
self.server.broken_ssl_peer_ids[message["params"]["peer_id"]] = True
|
||||
elif cmd:
|
||||
self.server.num_recv += 1
|
||||
if cmd == "handshake":
|
||||
self.handleHandshake(message)
|
||||
else:
|
||||
self.server.handleRequest(self, message)
|
||||
else: # Old style response, no req_id definied
|
||||
if config.debug_socket:
|
||||
self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
|
||||
last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true
|
||||
self.waiting_requests[last_req_id].set(message)
|
||||
del self.waiting_requests[last_req_id] # Remove from waiting request
|
||||
|
||||
# Stream socket directly to a file
|
||||
def handleStream(self, message):
|
||||
if config.debug_socket:
|
||||
self.log("Starting stream %s: %s bytes" % (message["to"], message["stream_bytes"]))
|
||||
# Incoming handshake set request
|
||||
def handleHandshake(self, message):
|
||||
self.setHandshake(message["params"])
|
||||
data = self.getHandshakeInfo()
|
||||
data["cmd"] = "response"
|
||||
data["to"] = message["req_id"]
|
||||
self.send(data) # Send response to handshake
|
||||
# Sent crypt request to client
|
||||
if self.crypt and not self.sock_wrapped:
|
||||
server = (self.type == "in")
|
||||
self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
|
||||
try:
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
|
||||
self.sock_wrapped = True
|
||||
except Exception as err:
|
||||
if not config.force_encryption:
|
||||
self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
|
||||
self.server.broken_ssl_ips[self.ip] = True
|
||||
self.close("Broken ssl")
|
||||
|
||||
read_bytes = message["stream_bytes"] # Bytes left we have to read from socket
|
||||
try:
|
||||
buff = self.unpacker.read_bytes(min(16 * 1024, read_bytes)) # Check if the unpacker has something left in buffer
|
||||
except Exception, err:
|
||||
buff = ""
|
||||
file = self.waiting_streams[message["to"]]
|
||||
if buff:
|
||||
read_bytes -= len(buff)
|
||||
file.write(buff)
|
||||
|
||||
try:
|
||||
while 1:
|
||||
if read_bytes <= 0:
|
||||
break
|
||||
buff = self.sock.recv(16 * 1024)
|
||||
if not buff:
|
||||
break
|
||||
buff_len = len(buff)
|
||||
read_bytes -= buff_len
|
||||
file.write(buff)
|
||||
|
||||
# Statistics
|
||||
self.last_recv_time = time.time()
|
||||
self.incomplete_buff_recv += 1
|
||||
self.bytes_recv += buff_len
|
||||
self.server.bytes_recv += buff_len
|
||||
except Exception, err:
|
||||
self.log("Stream read error: %s" % Debug.formatException(err))
|
||||
|
||||
if config.debug_socket:
|
||||
self.log("End stream %s" % message["to"])
|
||||
|
||||
self.incomplete_buff_recv = 0
|
||||
self.waiting_requests[message["to"]].set(message) # Set the response to event
|
||||
del self.waiting_streams[message["to"]]
|
||||
del self.waiting_requests[message["to"]]
|
||||
if not self.sock_wrapped and self.cert_pin:
|
||||
self.close("Crypt connection error: Socket not encrypted, but certificate pin present")
|
||||
|
||||
# Send data to connection
|
||||
def send(self, message, streaming=False):
|
||||
self.last_send_time = time.time()
|
||||
if config.debug_socket:
|
||||
self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
|
||||
message.get("cmd"), message.get("to"), streaming,
|
||||
message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
|
||||
message.get("req_id"))
|
||||
)
|
||||
self.last_send_time = time.time()
|
||||
|
||||
if not self.sock:
|
||||
self.log("Send error: missing socket")
|
||||
return False
|
||||
|
||||
if not self.connected and message.get("cmd") != "handshake":
|
||||
self.log("Wait for handshake before send request")
|
||||
self.event_connected.get()
|
||||
|
||||
try:
|
||||
stat_key = message.get("cmd", "unknown")
|
||||
if stat_key == "response":
|
||||
stat_key = "response: %s" % self.last_cmd_recv
|
||||
else:
|
||||
self.server.num_sent += 1
|
||||
|
||||
self.server.stat_sent[stat_key]["num"] += 1
|
||||
if streaming:
|
||||
bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
|
||||
message = None
|
||||
with self.send_lock:
|
||||
bytes_sent = Msgpack.stream(message, self.sock.sendall)
|
||||
self.bytes_sent += bytes_sent
|
||||
self.server.bytes_sent += bytes_sent
|
||||
else:
|
||||
data = msgpack.packb(message)
|
||||
self.server.stat_sent[stat_key]["bytes"] += bytes_sent
|
||||
message = None
|
||||
else:
|
||||
data = Msgpack.pack(message)
|
||||
self.bytes_sent += len(data)
|
||||
self.server.bytes_sent += len(data)
|
||||
self.sock.sendall(data)
|
||||
except Exception, err:
|
||||
self.log("Send errror: %s" % Debug.formatException(err))
|
||||
self.close()
|
||||
self.server.stat_sent[stat_key]["bytes"] += len(data)
|
||||
message = None
|
||||
with self.send_lock:
|
||||
self.sock.sendall(data)
|
||||
except Exception as err:
|
||||
self.close("Send error: %s (cmd: %s)" % (err, stat_key))
|
||||
return False
|
||||
self.last_sent_time = time.time()
|
||||
return True
|
||||
|
||||
# Stream raw file to connection
|
||||
# Stream file to connection without msgpacking
|
||||
def sendRawfile(self, file, read_bytes):
|
||||
buff = 64 * 1024
|
||||
bytes_left = read_bytes
|
||||
bytes_sent = 0
|
||||
while True:
|
||||
self.last_send_time = time.time()
|
||||
self.sock.sendall(
|
||||
file.read(min(bytes_left, buff))
|
||||
)
|
||||
data = file.read(min(bytes_left, buff))
|
||||
bytes_sent += len(data)
|
||||
with self.send_lock:
|
||||
self.sock.sendall(data)
|
||||
bytes_left -= buff
|
||||
if bytes_left <= 0:
|
||||
break
|
||||
self.bytes_sent += read_bytes
|
||||
self.server.bytes_sent += read_bytes
|
||||
self.bytes_sent += bytes_sent
|
||||
self.server.bytes_sent += bytes_sent
|
||||
self.server.stat_sent["raw_file"]["num"] += 1
|
||||
self.server.stat_sent["raw_file"]["bytes"] += bytes_sent
|
||||
return True
|
||||
|
||||
# Create and send a request to peer
|
||||
def request(self, cmd, params={}, stream_to=None):
|
||||
# Last command sent more than 10 sec ago, timeout
|
||||
if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
|
||||
self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
|
||||
self.close()
|
||||
self.close("Request %s timeout: %.3fs" % (self.last_cmd_sent, time.time() - self.last_send_time))
|
||||
return False
|
||||
|
||||
self.last_req_time = time.time()
|
||||
self.last_cmd = cmd
|
||||
self.last_cmd_sent = cmd
|
||||
self.req_id += 1
|
||||
data = {"cmd": cmd, "req_id": self.req_id, "params": params}
|
||||
event = gevent.event.AsyncResult() # Create new event for response
|
||||
self.waiting_requests[self.req_id] = event
|
||||
self.waiting_requests[self.req_id] = {"evt": event, "cmd": cmd}
|
||||
if stream_to:
|
||||
self.waiting_streams[self.req_id] = stream_to
|
||||
self.send(data) # Send request
|
||||
|
@ -338,16 +594,16 @@ class Connection(object):
|
|||
with gevent.Timeout(10.0, False):
|
||||
try:
|
||||
response = self.request("ping")
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log("Ping error: %s" % Debug.formatException(err))
|
||||
if response and "body" in response and response["body"] == "Pong!":
|
||||
if response and "body" in response and response["body"] == b"Pong!":
|
||||
self.last_ping_delay = time.time() - s
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
# Close connection
|
||||
def close(self):
|
||||
def close(self, reason="Unknown"):
|
||||
if self.closed:
|
||||
return False # Already closed
|
||||
self.closed = True
|
||||
|
@ -355,21 +611,21 @@ class Connection(object):
|
|||
if self.event_connected:
|
||||
self.event_connected.set(False)
|
||||
|
||||
if config.debug_socket:
|
||||
self.log(
|
||||
"Closing connection, waiting_requests: %s, buff: %s..." %
|
||||
(len(self.waiting_requests), self.incomplete_buff_recv)
|
||||
)
|
||||
self.log(
|
||||
"Closing connection: %s, waiting_requests: %s, sites: %s, buff: %s..." %
|
||||
(reason, len(self.waiting_requests), self.sites, self.incomplete_buff_recv)
|
||||
)
|
||||
for request in self.waiting_requests.values(): # Mark pending requests failed
|
||||
request.set(False)
|
||||
request["evt"].set(False)
|
||||
self.waiting_requests = {}
|
||||
self.waiting_streams = {}
|
||||
self.sites = 0
|
||||
self.server.removeConnection(self) # Remove connection from server registry
|
||||
try:
|
||||
if self.sock:
|
||||
self.sock.shutdown(gevent.socket.SHUT_WR)
|
||||
self.sock.close()
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
if config.debug_socket:
|
||||
self.log("Close error: %s" % err)
|
||||
|
||||
|
|
|
@ -1,82 +1,153 @@
|
|||
import logging
|
||||
import random
|
||||
import string
|
||||
import time
|
||||
import sys
|
||||
import socket
|
||||
from collections import defaultdict
|
||||
|
||||
import gevent
|
||||
import msgpack
|
||||
from gevent.server import StreamServer
|
||||
from gevent.pool import Pool
|
||||
|
||||
import util
|
||||
from util import helper
|
||||
from Debug import Debug
|
||||
from Connection import Connection
|
||||
from .Connection import Connection
|
||||
from Config import config
|
||||
from Crypt import CryptConnection
|
||||
from Crypt import CryptHash
|
||||
from Tor import TorManager
|
||||
from Site import SiteManager
|
||||
|
||||
|
||||
class ConnectionServer:
|
||||
class ConnectionServer(object):
|
||||
def __init__(self, ip=None, port=None, request_handler=None):
|
||||
if not ip:
|
||||
if config.fileserver_ip_type == "ipv6":
|
||||
ip = "::1"
|
||||
else:
|
||||
ip = "127.0.0.1"
|
||||
port = 15441
|
||||
self.ip = ip
|
||||
self.port = port
|
||||
self.last_connection_id = 1 # Connection id incrementer
|
||||
self.last_connection_id = 0 # Connection id incrementer
|
||||
self.last_connection_id_current_version = 0 # Connection id incrementer for current client version
|
||||
self.last_connection_id_supported_version = 0 # Connection id incrementer for last supported version
|
||||
self.log = logging.getLogger("ConnServer")
|
||||
self.port_opened = None
|
||||
self.port_opened = {}
|
||||
self.peer_blacklist = SiteManager.peer_blacklist
|
||||
|
||||
self.tor_manager = TorManager(self.ip, self.port)
|
||||
self.connections = [] # Connections
|
||||
self.whitelist = config.ip_local # No flood protection on this ips
|
||||
self.ip_incoming = {} # Incoming connections from ip in the last minute to avoid connection flood
|
||||
self.broken_ssl_peer_ids = {} # Peerids of broken ssl connections
|
||||
self.broken_ssl_ips = {} # Peerids of broken ssl connections
|
||||
self.ips = {} # Connection by ip
|
||||
self.has_internet = True # Internet outage detection
|
||||
|
||||
self.running = True
|
||||
self.thread_checker = gevent.spawn(self.checkConnections)
|
||||
self.stream_server = None
|
||||
self.stream_server_proxy = None
|
||||
self.running = False
|
||||
self.stopping = False
|
||||
self.thread_checker = None
|
||||
|
||||
self.stat_recv = defaultdict(lambda: defaultdict(int))
|
||||
self.stat_sent = defaultdict(lambda: defaultdict(int))
|
||||
self.bytes_recv = 0
|
||||
self.bytes_sent = 0
|
||||
self.num_recv = 0
|
||||
self.num_sent = 0
|
||||
|
||||
self.num_incoming = 0
|
||||
self.num_outgoing = 0
|
||||
self.had_external_incoming = False
|
||||
|
||||
self.timecorrection = 0.0
|
||||
self.pool = Pool(500) # do not accept more than 500 connections
|
||||
|
||||
# Bittorrent style peerid
|
||||
self.peer_id = "-ZN0%s-%s" % (config.version.replace(".", ""), CryptHash.random(12, "base64"))
|
||||
self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64")
|
||||
|
||||
# Check msgpack version
|
||||
if msgpack.version[0] == 0 and msgpack.version[1] < 4:
|
||||
self.log.error(
|
||||
"Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`" %
|
||||
"Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" %
|
||||
str(msgpack.version)
|
||||
)
|
||||
sys.exit(0)
|
||||
|
||||
if port: # Listen server on a port
|
||||
self.pool = Pool(1000) # do not accept more than 1000 connections
|
||||
self.stream_server = StreamServer(
|
||||
(ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
|
||||
)
|
||||
if request_handler:
|
||||
self.handleRequest = request_handler
|
||||
if request_handler:
|
||||
self.handleRequest = request_handler
|
||||
|
||||
def start(self):
|
||||
def start(self, check_connections=True):
|
||||
if self.stopping:
|
||||
return False
|
||||
self.running = True
|
||||
if check_connections:
|
||||
self.thread_checker = gevent.spawn(self.checkConnections)
|
||||
CryptConnection.manager.loadCerts()
|
||||
if config.tor != "disable":
|
||||
self.tor_manager.start()
|
||||
if not self.port:
|
||||
self.log.info("No port found, not binding")
|
||||
return False
|
||||
|
||||
self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % (
|
||||
self.ip, self.port,
|
||||
".".join(map(str, msgpack.version)), CryptConnection.manager.crypt_supported)
|
||||
)
|
||||
self.ip, self.port, ".".join(map(str, msgpack.version)),
|
||||
CryptConnection.manager.crypt_supported
|
||||
))
|
||||
try:
|
||||
self.stream_server.serve_forever() # Start normal connection server
|
||||
except Exception, err:
|
||||
self.log.info("StreamServer bind error, must be running already: %s" % err)
|
||||
self.stream_server = StreamServer(
|
||||
(self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
|
||||
)
|
||||
except Exception as err:
|
||||
self.log.info("StreamServer create error: %s" % Debug.formatException(err))
|
||||
|
||||
def listen(self):
|
||||
if not self.running:
|
||||
return None
|
||||
|
||||
if self.stream_server_proxy:
|
||||
gevent.spawn(self.listenProxy)
|
||||
try:
|
||||
self.stream_server.serve_forever()
|
||||
except Exception as err:
|
||||
self.log.info("StreamServer listen error: %s" % err)
|
||||
return False
|
||||
self.log.debug("Stopped.")
|
||||
|
||||
def stop(self):
|
||||
self.log.debug("Stopping %s" % self.stream_server)
|
||||
self.stopping = True
|
||||
self.running = False
|
||||
self.stream_server.stop()
|
||||
if self.thread_checker:
|
||||
gevent.kill(self.thread_checker)
|
||||
if self.stream_server:
|
||||
self.stream_server.stop()
|
||||
|
||||
def closeConnections(self):
|
||||
self.log.debug("Closing all connection: %s" % len(self.connections))
|
||||
for connection in self.connections[:]:
|
||||
connection.close("Close all connections")
|
||||
|
||||
def handleIncomingConnection(self, sock, addr):
|
||||
ip, port = addr
|
||||
if config.offline:
|
||||
sock.close()
|
||||
return False
|
||||
|
||||
ip, port = addr[0:2]
|
||||
ip = ip.lower()
|
||||
if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping
|
||||
ip = ip.replace("::ffff:", "", 1)
|
||||
self.num_incoming += 1
|
||||
|
||||
if not self.had_external_incoming and not helper.isPrivateIp(ip):
|
||||
self.had_external_incoming = True
|
||||
|
||||
# Connection flood protection
|
||||
if ip in self.ip_incoming:
|
||||
if ip in self.ip_incoming and ip not in self.whitelist:
|
||||
self.ip_incoming[ip] += 1
|
||||
if self.ip_incoming[ip] > 3: # Allow 3 in 1 minute from same ip
|
||||
if self.ip_incoming[ip] > 6: # Allow 6 in 1 minute from same ip
|
||||
self.log.debug("Connection flood detected from %s" % ip)
|
||||
time.sleep(30)
|
||||
sock.close()
|
||||
|
@ -86,13 +157,33 @@ class ConnectionServer:
|
|||
|
||||
connection = Connection(self, ip, port, sock)
|
||||
self.connections.append(connection)
|
||||
self.ips[ip] = connection
|
||||
rev = connection.handshake.get("rev", 0)
|
||||
if rev >= 4560:
|
||||
self.last_connection_id_supported_version += 1
|
||||
if rev == config.rev:
|
||||
self.last_connection_id_current_version += 1
|
||||
if ip not in config.ip_local:
|
||||
self.ips[ip] = connection
|
||||
connection.handleIncomingConnection(sock)
|
||||
|
||||
def getConnection(self, ip=None, port=None, peer_id=None, create=True):
|
||||
def handleMessage(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None, is_tracker_connection=False):
|
||||
ip_type = helper.getIpType(ip)
|
||||
has_per_site_onion = (ip.endswith(".onion") or self.port_opened.get(ip_type, None) == False) and self.tor_manager.start_onions and site
|
||||
if has_per_site_onion: # Site-unique connection for Tor
|
||||
if ip.endswith(".onion"):
|
||||
site_onion = self.tor_manager.getOnion(site.address)
|
||||
else:
|
||||
site_onion = self.tor_manager.getOnion("global")
|
||||
key = ip + site_onion
|
||||
else:
|
||||
key = ip
|
||||
|
||||
# Find connection by ip
|
||||
if ip in self.ips:
|
||||
connection = self.ips[ip]
|
||||
if key in self.ips:
|
||||
connection = self.ips[key]
|
||||
if not peer_id or connection.handshake.get("peer_id") == peer_id: # Filter by peer_id
|
||||
if not connection.connected and create:
|
||||
succ = connection.event_connected.get() # Wait for connection
|
||||
|
@ -100,81 +191,196 @@ class ConnectionServer:
|
|||
raise Exception("Connection event return error")
|
||||
return connection
|
||||
|
||||
# Recover from connection pool
|
||||
for connection in self.connections:
|
||||
if connection.ip == ip:
|
||||
if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match
|
||||
continue
|
||||
if not connection.connected and create:
|
||||
succ = connection.event_connected.get() # Wait for connection
|
||||
if not succ:
|
||||
raise Exception("Connection event return error")
|
||||
return connection
|
||||
# Recover from connection pool
|
||||
for connection in self.connections:
|
||||
if connection.ip == ip:
|
||||
if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match
|
||||
continue
|
||||
if ip.endswith(".onion") and self.tor_manager.start_onions and ip.replace(".onion", "") != connection.target_onion:
|
||||
# For different site
|
||||
continue
|
||||
if not connection.connected and create:
|
||||
succ = connection.event_connected.get() # Wait for connection
|
||||
if not succ:
|
||||
raise Exception("Connection event return error")
|
||||
return connection
|
||||
|
||||
# No connection found
|
||||
if create: # Allow to create new connection if not found
|
||||
if create and not config.offline: # Allow to create new connection if not found
|
||||
if port == 0:
|
||||
raise Exception("This peer is not connectable")
|
||||
|
||||
if (ip, port) in self.peer_blacklist and not is_tracker_connection:
|
||||
raise Exception("This peer is blacklisted")
|
||||
|
||||
try:
|
||||
connection = Connection(self, ip, port)
|
||||
self.ips[ip] = connection
|
||||
if has_per_site_onion: # Lock connection to site
|
||||
connection = Connection(self, ip, port, target_onion=site_onion, is_tracker_connection=is_tracker_connection)
|
||||
else:
|
||||
connection = Connection(self, ip, port, is_tracker_connection=is_tracker_connection)
|
||||
self.num_outgoing += 1
|
||||
self.ips[key] = connection
|
||||
self.connections.append(connection)
|
||||
connection.log("Connecting... (site: %s)" % site)
|
||||
succ = connection.connect()
|
||||
if not succ:
|
||||
connection.close()
|
||||
connection.close("Connection event return error")
|
||||
raise Exception("Connection event return error")
|
||||
else:
|
||||
rev = connection.handshake.get("rev", 0)
|
||||
if rev >= 4560:
|
||||
self.last_connection_id_supported_version += 1
|
||||
if rev == config.rev:
|
||||
self.last_connection_id_current_version += 1
|
||||
|
||||
except Exception, err:
|
||||
self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err)))
|
||||
connection.close()
|
||||
except Exception as err:
|
||||
connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
|
||||
raise err
|
||||
|
||||
if len(self.connections) > config.global_connected_limit:
|
||||
gevent.spawn(self.checkMaxConnections)
|
||||
|
||||
return connection
|
||||
else:
|
||||
return None
|
||||
|
||||
def removeConnection(self, connection):
|
||||
self.log.debug("Removing %s..." % connection)
|
||||
if self.ips.get(connection.ip) == connection: # Delete if same as in registry
|
||||
# Delete if same as in registry
|
||||
if self.ips.get(connection.ip) == connection:
|
||||
del self.ips[connection.ip]
|
||||
# Site locked connection
|
||||
if connection.target_onion:
|
||||
if self.ips.get(connection.ip + connection.target_onion) == connection:
|
||||
del self.ips[connection.ip + connection.target_onion]
|
||||
# Cert pinned connection
|
||||
if connection.cert_pin and self.ips.get(connection.ip + "#" + connection.cert_pin) == connection:
|
||||
del self.ips[connection.ip + "#" + connection.cert_pin]
|
||||
|
||||
if connection in self.connections:
|
||||
self.connections.remove(connection)
|
||||
|
||||
def checkConnections(self):
|
||||
run_i = 0
|
||||
time.sleep(15)
|
||||
while self.running:
|
||||
time.sleep(60) # Sleep 1 min
|
||||
run_i += 1
|
||||
self.ip_incoming = {} # Reset connected ips counter
|
||||
self.broken_ssl_peer_ids = {} # Reset broken ssl peerids count
|
||||
last_message_time = 0
|
||||
s = time.time()
|
||||
for connection in self.connections[:]: # Make a copy
|
||||
if connection.ip.endswith(".onion") or config.tor == "always":
|
||||
timeout_multipler = 2
|
||||
else:
|
||||
timeout_multipler = 1
|
||||
|
||||
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
|
||||
if connection.last_message_time > last_message_time and not connection.is_private_ip:
|
||||
# Message from local IPs does not means internet connection
|
||||
last_message_time = connection.last_message_time
|
||||
|
||||
if connection.unpacker and idle > 30:
|
||||
# Delete the unpacker if not needed
|
||||
del connection.unpacker
|
||||
connection.unpacker = None
|
||||
connection.log("Unpacker deleted")
|
||||
|
||||
elif connection.last_cmd_sent == "announce" and idle > 20: # Bootstrapper connection close after 20 sec
|
||||
connection.close("[Cleanup] Tracker connection, idle: %.3fs" % idle)
|
||||
|
||||
if idle > 60 * 60:
|
||||
# Wake up after 1h
|
||||
connection.log("[Cleanup] After wakeup, idle: %s" % idle)
|
||||
connection.close()
|
||||
connection.close("[Cleanup] After wakeup, idle: %.3fs" % idle)
|
||||
|
||||
elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
|
||||
# Idle more than 20 min and we not send request in last 10 sec
|
||||
if not connection.ping(): # send ping request
|
||||
connection.close()
|
||||
# Idle more than 20 min and we have not sent request in last 10 sec
|
||||
if not connection.ping():
|
||||
connection.close("[Cleanup] Ping timeout")
|
||||
|
||||
elif idle > 10 and connection.incomplete_buff_recv > 0:
|
||||
# Incompelte data with more than 10 sec idle
|
||||
connection.log("[Cleanup] Connection buff stalled")
|
||||
connection.close()
|
||||
elif idle > 10 * timeout_multipler and connection.incomplete_buff_recv > 0:
|
||||
# Incomplete data with more than 10 sec idle
|
||||
connection.close("[Cleanup] Connection buff stalled")
|
||||
|
||||
elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10:
|
||||
# Sent command and no response in 10 sec
|
||||
connection.log(
|
||||
"[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)
|
||||
elif idle > 10 * timeout_multipler and connection.protocol == "?": # No connection after 10 sec
|
||||
connection.close(
|
||||
"[Cleanup] Connect timeout: %.3fs" % idle
|
||||
)
|
||||
connection.close()
|
||||
|
||||
elif idle > 60 and connection.protocol == "?": # No connection after 1 min
|
||||
connection.log("[Cleanup] Connect timeout: %s" % idle)
|
||||
connection.close()
|
||||
elif idle > 10 * timeout_multipler and connection.waiting_requests and time.time() - connection.last_send_time > 10 * timeout_multipler:
|
||||
# Sent command and no response in 10 sec
|
||||
connection.close(
|
||||
"[Cleanup] Command %s timeout: %.3fs" % (connection.last_cmd_sent, time.time() - connection.last_send_time)
|
||||
)
|
||||
|
||||
elif idle < 60 and connection.bad_actions > 40:
|
||||
connection.close(
|
||||
"[Cleanup] Too many bad actions: %s" % connection.bad_actions
|
||||
)
|
||||
|
||||
elif idle > 5 * 60 and connection.sites == 0:
|
||||
connection.close(
|
||||
"[Cleanup] No site for connection"
|
||||
)
|
||||
|
||||
elif run_i % 90 == 0:
|
||||
# Reset bad action counter every 30 min
|
||||
connection.bad_actions = 0
|
||||
|
||||
# Internet outage detection
|
||||
if time.time() - last_message_time > max(60, 60 * 10 / max(1, float(len(self.connections)) / 50)):
|
||||
# Offline: Last message more than 60-600sec depending on connection number
|
||||
if self.has_internet and last_message_time:
|
||||
self.has_internet = False
|
||||
self.onInternetOffline()
|
||||
else:
|
||||
# Online
|
||||
if not self.has_internet:
|
||||
self.has_internet = True
|
||||
self.onInternetOnline()
|
||||
|
||||
self.timecorrection = self.getTimecorrection()
|
||||
|
||||
if time.time() - s > 0.01:
|
||||
self.log.debug("Connection cleanup in %.3fs" % (time.time() - s))
|
||||
|
||||
time.sleep(15)
|
||||
self.log.debug("Checkconnections ended")
|
||||
|
||||
@util.Noparallel(blocking=False)
|
||||
def checkMaxConnections(self):
|
||||
if len(self.connections) < config.global_connected_limit:
|
||||
return 0
|
||||
|
||||
s = time.time()
|
||||
num_connected_before = len(self.connections)
|
||||
self.connections.sort(key=lambda connection: connection.sites)
|
||||
num_closed = 0
|
||||
for connection in self.connections:
|
||||
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
|
||||
if idle > 60:
|
||||
connection.close("Connection limit reached")
|
||||
num_closed += 1
|
||||
if num_closed > config.global_connected_limit * 0.1:
|
||||
break
|
||||
|
||||
self.log.debug("Closed %s connections of %s after reached limit %s in %.3fs" % (
|
||||
num_closed, num_connected_before, config.global_connected_limit, time.time() - s
|
||||
))
|
||||
return num_closed
|
||||
|
||||
def onInternetOnline(self):
|
||||
self.log.info("Internet online")
|
||||
|
||||
def onInternetOffline(self):
|
||||
self.had_external_incoming = False
|
||||
self.log.info("Internet offline")
|
||||
|
||||
def getTimecorrection(self):
|
||||
corrections = sorted([
|
||||
connection.handshake.get("time") - connection.handshake_time + connection.last_ping_delay
|
||||
for connection in self.connections
|
||||
if connection.handshake.get("time") and connection.last_ping_delay
|
||||
])
|
||||
if len(corrections) < 9:
|
||||
return 0.0
|
||||
mid = int(len(corrections) / 2 - 1)
|
||||
median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
|
||||
return median
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
from ConnectionServer import ConnectionServer
|
||||
from Connection import Connection
|
||||
from .ConnectionServer import ConnectionServer
|
||||
from .Connection import Connection
|
||||
|
|
162
src/Content/ContentDb.py
Normal file
162
src/Content/ContentDb.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
import os
|
||||
|
||||
from Db.Db import Db, DbTableError
|
||||
from Config import config
|
||||
from Plugin import PluginManager
|
||||
from Debug import Debug
|
||||
|
||||
|
||||
@PluginManager.acceptPlugins
|
||||
class ContentDb(Db):
|
||||
def __init__(self, path):
|
||||
Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path)
|
||||
self.foreign_keys = True
|
||||
|
||||
def init(self):
|
||||
try:
|
||||
self.schema = self.getSchema()
|
||||
try:
|
||||
self.checkTables()
|
||||
except DbTableError:
|
||||
pass
|
||||
self.log.debug("Checking foreign keys...")
|
||||
foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
|
||||
if foreign_key_error:
|
||||
raise Exception("Database foreign key error: %s" % foreign_key_error)
|
||||
except Exception as err:
|
||||
self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
|
||||
self.close()
|
||||
os.unlink(self.db_path) # Remove and try again
|
||||
Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path)
|
||||
self.foreign_keys = True
|
||||
self.schema = self.getSchema()
|
||||
try:
|
||||
self.checkTables()
|
||||
except DbTableError:
|
||||
pass
|
||||
self.site_ids = {}
|
||||
self.sites = {}
|
||||
|
||||
def getSchema(self):
|
||||
schema = {}
|
||||
schema["db_name"] = "ContentDb"
|
||||
schema["version"] = 3
|
||||
schema["tables"] = {}
|
||||
|
||||
if not self.getTableVersion("site"):
|
||||
self.log.debug("Migrating from table version-less content.db")
|
||||
version = int(self.execute("PRAGMA user_version").fetchone()[0])
|
||||
if version > 0:
|
||||
self.checkTables()
|
||||
self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.site.version", "value": 1})
|
||||
self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.content.version", "value": 1})
|
||||
|
||||
schema["tables"]["site"] = {
|
||||
"cols": [
|
||||
["site_id", "INTEGER PRIMARY KEY ASC NOT NULL UNIQUE"],
|
||||
["address", "TEXT NOT NULL"]
|
||||
],
|
||||
"indexes": [
|
||||
"CREATE UNIQUE INDEX site_address ON site (address)"
|
||||
],
|
||||
"schema_changed": 1
|
||||
}
|
||||
|
||||
schema["tables"]["content"] = {
|
||||
"cols": [
|
||||
["content_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"],
|
||||
["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
|
||||
["inner_path", "TEXT"],
|
||||
["size", "INTEGER"],
|
||||
["size_files", "INTEGER"],
|
||||
["size_files_optional", "INTEGER"],
|
||||
["modified", "INTEGER"]
|
||||
],
|
||||
"indexes": [
|
||||
"CREATE UNIQUE INDEX content_key ON content (site_id, inner_path)",
|
||||
"CREATE INDEX content_modified ON content (site_id, modified)"
|
||||
],
|
||||
"schema_changed": 1
|
||||
}
|
||||
|
||||
return schema
|
||||
|
||||
def initSite(self, site):
|
||||
self.sites[site.address] = site
|
||||
|
||||
def needSite(self, site):
|
||||
if site.address not in self.site_ids:
|
||||
self.execute("INSERT OR IGNORE INTO site ?", {"address": site.address})
|
||||
self.site_ids = {}
|
||||
for row in self.execute("SELECT * FROM site"):
|
||||
self.site_ids[row["address"]] = row["site_id"]
|
||||
return self.site_ids[site.address]
|
||||
|
||||
def deleteSite(self, site):
|
||||
site_id = self.site_ids.get(site.address, 0)
|
||||
if site_id:
|
||||
self.execute("DELETE FROM site WHERE site_id = :site_id", {"site_id": site_id})
|
||||
del self.site_ids[site.address]
|
||||
del self.sites[site.address]
|
||||
|
||||
def setContent(self, site, inner_path, content, size=0):
|
||||
self.insertOrUpdate("content", {
|
||||
"size": size,
|
||||
"size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
|
||||
"size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
|
||||
"modified": int(content.get("modified", 0))
|
||||
}, {
|
||||
"site_id": self.site_ids.get(site.address, 0),
|
||||
"inner_path": inner_path
|
||||
})
|
||||
|
||||
def deleteContent(self, site, inner_path):
|
||||
self.execute("DELETE FROM content WHERE ?", {"site_id": self.site_ids.get(site.address, 0), "inner_path": inner_path})
|
||||
|
||||
def loadDbDict(self, site):
|
||||
res = self.execute(
|
||||
"SELECT GROUP_CONCAT(inner_path, '|') AS inner_paths FROM content WHERE ?",
|
||||
{"site_id": self.site_ids.get(site.address, 0)}
|
||||
)
|
||||
row = res.fetchone()
|
||||
if row and row["inner_paths"]:
|
||||
inner_paths = row["inner_paths"].split("|")
|
||||
return dict.fromkeys(inner_paths, False)
|
||||
else:
|
||||
return {}
|
||||
|
||||
def getTotalSize(self, site, ignore=None):
|
||||
params = {"site_id": self.site_ids.get(site.address, 0)}
|
||||
if ignore:
|
||||
params["not__inner_path"] = ignore
|
||||
res = self.execute("SELECT SUM(size) + SUM(size_files) AS size, SUM(size_files_optional) AS size_optional FROM content WHERE ?", params)
|
||||
row = dict(res.fetchone())
|
||||
|
||||
if not row["size"]:
|
||||
row["size"] = 0
|
||||
if not row["size_optional"]:
|
||||
row["size_optional"] = 0
|
||||
|
||||
return row["size"], row["size_optional"]
|
||||
|
||||
def listModified(self, site, after=None, before=None):
|
||||
params = {"site_id": self.site_ids.get(site.address, 0)}
|
||||
if after:
|
||||
params["modified>"] = after
|
||||
if before:
|
||||
params["modified<"] = before
|
||||
res = self.execute("SELECT inner_path, modified FROM content WHERE ?", params)
|
||||
return {row["inner_path"]: row["modified"] for row in res}
|
||||
|
||||
content_dbs = {}
|
||||
|
||||
|
||||
def getContentDb(path=None):
|
||||
if not path:
|
||||
path = "%s/content.db" % config.data_dir
|
||||
if path not in content_dbs:
|
||||
content_dbs[path] = ContentDb(path)
|
||||
content_dbs[path].init()
|
||||
return content_dbs[path]
|
||||
|
||||
getContentDb() # Pre-connect to default one
|
155
src/Content/ContentDbDict.py
Normal file
155
src/Content/ContentDbDict.py
Normal file
|
@ -0,0 +1,155 @@
|
|||
import time
|
||||
import os
|
||||
|
||||
from . import ContentDb
|
||||
from Debug import Debug
|
||||
from Config import config
|
||||
|
||||
|
||||
class ContentDbDict(dict):
|
||||
def __init__(self, site, *args, **kwargs):
|
||||
s = time.time()
|
||||
self.site = site
|
||||
self.cached_keys = []
|
||||
self.log = self.site.log
|
||||
self.db = ContentDb.getContentDb()
|
||||
self.db_id = self.db.needSite(site)
|
||||
self.num_loaded = 0
|
||||
super(ContentDbDict, self).__init__(self.db.loadDbDict(site)) # Load keys from database
|
||||
self.log.debug("ContentDb init: %.3fs, found files: %s, sites: %s" % (time.time() - s, len(self), len(self.db.site_ids)))
|
||||
|
||||
def loadItem(self, key):
|
||||
try:
|
||||
self.num_loaded += 1
|
||||
if self.num_loaded % 100 == 0:
|
||||
if config.verbose:
|
||||
self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack()))
|
||||
else:
|
||||
self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key))
|
||||
content = self.site.storage.loadJson(key)
|
||||
dict.__setitem__(self, key, content)
|
||||
except IOError:
|
||||
if dict.get(self, key):
|
||||
self.__delitem__(key) # File not exists anymore
|
||||
raise KeyError(key)
|
||||
|
||||
self.addCachedKey(key)
|
||||
self.checkLimit()
|
||||
|
||||
return content
|
||||
|
||||
def getItemSize(self, key):
|
||||
return self.site.storage.getSize(key)
|
||||
|
||||
# Only keep last 10 accessed json in memory
|
||||
def checkLimit(self):
|
||||
if len(self.cached_keys) > 10:
|
||||
key_deleted = self.cached_keys.pop(0)
|
||||
dict.__setitem__(self, key_deleted, False)
|
||||
|
||||
def addCachedKey(self, key):
|
||||
if key not in self.cached_keys and key != "content.json" and len(key) > 40: # Always keep keys smaller than 40 char
|
||||
self.cached_keys.append(key)
|
||||
|
||||
def __getitem__(self, key):
|
||||
val = dict.get(self, key)
|
||||
if val: # Already loaded
|
||||
return val
|
||||
elif val is None: # Unknown key
|
||||
raise KeyError(key)
|
||||
elif val is False: # Loaded before, but purged from cache
|
||||
return self.loadItem(key)
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
self.addCachedKey(key)
|
||||
self.checkLimit()
|
||||
size = self.getItemSize(key)
|
||||
self.db.setContent(self.site, key, val, size)
|
||||
dict.__setitem__(self, key, val)
|
||||
|
||||
def __delitem__(self, key):
|
||||
self.db.deleteContent(self.site, key)
|
||||
dict.__delitem__(self, key)
|
||||
try:
|
||||
self.cached_keys.remove(key)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def iteritems(self):
|
||||
for key in dict.keys(self):
|
||||
try:
|
||||
val = self[key]
|
||||
except Exception as err:
|
||||
self.log.warning("Error loading %s: %s" % (key, err))
|
||||
continue
|
||||
yield key, val
|
||||
|
||||
def items(self):
|
||||
back = []
|
||||
for key in dict.keys(self):
|
||||
try:
|
||||
val = self[key]
|
||||
except Exception as err:
|
||||
self.log.warning("Error loading %s: %s" % (key, err))
|
||||
continue
|
||||
back.append((key, val))
|
||||
return back
|
||||
|
||||
def values(self):
|
||||
back = []
|
||||
for key, val in dict.iteritems(self):
|
||||
if not val:
|
||||
try:
|
||||
val = self.loadItem(key)
|
||||
except Exception:
|
||||
continue
|
||||
back.append(val)
|
||||
return back
|
||||
|
||||
def get(self, key, default=None):
|
||||
try:
|
||||
return self.__getitem__(key)
|
||||
except KeyError:
|
||||
return default
|
||||
except Exception as err:
|
||||
self.site.bad_files[key] = self.site.bad_files.get(key, 1)
|
||||
dict.__delitem__(self, key)
|
||||
self.log.warning("Error loading %s: %s" % (key, err))
|
||||
return default
|
||||
|
||||
def execute(self, query, params={}):
|
||||
params["site_id"] = self.db_id
|
||||
return self.db.execute(query, params)
|
||||
|
||||
if __name__ == "__main__":
|
||||
import psutil
|
||||
process = psutil.Process(os.getpid())
|
||||
s_mem = process.memory_info()[0] / float(2 ** 20)
|
||||
root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
|
||||
contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
|
||||
print("Init len", len(contents))
|
||||
|
||||
s = time.time()
|
||||
for dir_name in os.listdir(root + "/data/users/")[0:8000]:
|
||||
contents["data/users/%s/content.json" % dir_name]
|
||||
print("Load: %.3fs" % (time.time() - s))
|
||||
|
||||
s = time.time()
|
||||
found = 0
|
||||
for key, val in contents.items():
|
||||
found += 1
|
||||
assert key
|
||||
assert val
|
||||
print("Found:", found)
|
||||
print("Iteritem: %.3fs" % (time.time() - s))
|
||||
|
||||
s = time.time()
|
||||
found = 0
|
||||
for key in list(contents.keys()):
|
||||
found += 1
|
||||
assert key in contents
|
||||
print("In: %.3fs" % (time.time() - s))
|
||||
|
||||
print("Len:", len(list(contents.values())), len(list(contents.keys())))
|
||||
|
||||
print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)
|
File diff suppressed because it is too large
Load diff
|
@ -1 +1 @@
|
|||
from ContentManager import ContentManager
|
||||
from .ContentManager import ContentManager
|
4
src/Crypt/Crypt.py
Normal file
4
src/Crypt/Crypt.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
from Config import config
|
||||
from util import ThreadPool
|
||||
|
||||
thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt)
|
|
@ -1,75 +1,101 @@
|
|||
import logging
|
||||
import base64
|
||||
import binascii
|
||||
import time
|
||||
import hashlib
|
||||
|
||||
from lib.BitcoinECC import BitcoinECC
|
||||
from lib.pybitcointools import bitcoin as btctools
|
||||
from util.Electrum import dbl_format
|
||||
from Config import config
|
||||
|
||||
# Try to load openssl
|
||||
import util.OpensslFindPatch
|
||||
|
||||
lib_verify_best = "sslcrypto"
|
||||
|
||||
from lib import sslcrypto
|
||||
sslcurve_native = sslcrypto.ecc.get_curve("secp256k1")
|
||||
sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1")
|
||||
sslcurve = sslcurve_native
|
||||
|
||||
def loadLib(lib_name, silent=False):
|
||||
global sslcurve, libsecp256k1message, lib_verify_best
|
||||
if lib_name == "libsecp256k1":
|
||||
s = time.time()
|
||||
from lib import libsecp256k1message
|
||||
import coincurve
|
||||
lib_verify_best = "libsecp256k1"
|
||||
if not silent:
|
||||
logging.info(
|
||||
"Libsecpk256k1 loaded: %s in %.3fs" %
|
||||
(type(coincurve._libsecp256k1.lib).__name__, time.time() - s)
|
||||
)
|
||||
elif lib_name == "sslcrypto":
|
||||
sslcurve = sslcurve_native
|
||||
if sslcurve_native == sslcurve_fallback:
|
||||
logging.warning("SSLCurve fallback loaded instead of native")
|
||||
elif lib_name == "sslcrypto_fallback":
|
||||
sslcurve = sslcurve_fallback
|
||||
|
||||
try:
|
||||
if not config.use_openssl:
|
||||
if not config.use_libsecp256k1:
|
||||
raise Exception("Disabled by config")
|
||||
from lib.opensslVerify import opensslVerify
|
||||
logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version)
|
||||
except Exception, err:
|
||||
logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err)
|
||||
opensslVerify = None
|
||||
loadLib("libsecp256k1")
|
||||
lib_verify_best = "libsecp256k1"
|
||||
except Exception as err:
|
||||
logging.info("Libsecp256k1 load failed: %s" % err)
|
||||
|
||||
|
||||
def newPrivatekey(uncompressed=True): # Return new private key
|
||||
privatekey = btctools.encode_privkey(btctools.random_key(), "wif")
|
||||
return privatekey
|
||||
def newPrivatekey(): # Return new private key
|
||||
return sslcurve.private_to_wif(sslcurve.new_private_key()).decode()
|
||||
|
||||
|
||||
def newSeed():
|
||||
return btctools.random_key()
|
||||
return binascii.hexlify(sslcurve.new_private_key()).decode()
|
||||
|
||||
|
||||
def hdPrivatekey(seed, child):
|
||||
masterkey = btctools.bip32_master_key(seed)
|
||||
childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems
|
||||
key = btctools.bip32_extract_key(childkey)
|
||||
return btctools.encode_privkey(key, "wif")
|
||||
# Too large child id could cause problems
|
||||
privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000)
|
||||
return sslcurve.private_to_wif(privatekey_bin).decode()
|
||||
|
||||
|
||||
def privatekeyToAddress(privatekey): # Return address from private key
|
||||
if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib
|
||||
bitcoin = BitcoinECC.Bitcoin()
|
||||
bitcoin.BitcoinAddressFromPrivate(privatekey)
|
||||
return bitcoin.BitcoinAddresFromPublicKey()
|
||||
else:
|
||||
try:
|
||||
return btctools.privkey_to_address(privatekey)
|
||||
except Exception: # Invalid privatekey
|
||||
return False
|
||||
try:
|
||||
if len(privatekey) == 64:
|
||||
privatekey_bin = bytes.fromhex(privatekey)
|
||||
else:
|
||||
privatekey_bin = sslcurve.wif_to_private(privatekey.encode())
|
||||
return sslcurve.private_to_address(privatekey_bin).decode()
|
||||
except Exception: # Invalid privatekey
|
||||
return False
|
||||
|
||||
|
||||
def sign(data, privatekey): # Return sign to data using private key
|
||||
if privatekey.startswith("23") and len(privatekey) > 52:
|
||||
return None # Old style private key not supported
|
||||
sign = btctools.ecdsa_sign(data, privatekey)
|
||||
return sign
|
||||
return base64.b64encode(sslcurve.sign(
|
||||
data.encode(),
|
||||
sslcurve.wif_to_private(privatekey.encode()),
|
||||
recoverable=True,
|
||||
hash=dbl_format
|
||||
)).decode()
|
||||
|
||||
|
||||
def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style)
|
||||
bitcoin = BitcoinECC.Bitcoin()
|
||||
bitcoin.BitcoinAddressFromPrivate(privatekey)
|
||||
sign = bitcoin.SignECDSA(data)
|
||||
return sign
|
||||
def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and sign
|
||||
if not lib_verify:
|
||||
lib_verify = lib_verify_best
|
||||
|
||||
if not sign:
|
||||
return False
|
||||
|
||||
def verify(data, address, sign): # Verify data using address and sign
|
||||
if hasattr(sign, "endswith"):
|
||||
if opensslVerify: # Use the faster method if avalible
|
||||
pub = opensslVerify.getMessagePubkey(data, sign)
|
||||
sign_address = btctools.pubtoaddr(pub)
|
||||
else: # Use pure-python
|
||||
pub = btctools.ecdsa_recover(data, sign)
|
||||
sign_address = btctools.pubtoaddr(pub)
|
||||
if lib_verify == "libsecp256k1":
|
||||
sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8")
|
||||
elif lib_verify in ("sslcrypto", "sslcrypto_fallback"):
|
||||
publickey = sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format)
|
||||
sign_address = sslcurve.public_to_address(publickey).decode()
|
||||
else:
|
||||
raise Exception("No library enabled for signature verification")
|
||||
|
||||
if type(address) is list: # Any address in the list
|
||||
return sign_address in address
|
||||
else: # One possible address
|
||||
return sign_address == address
|
||||
else: # Backward compatible old style
|
||||
bitcoin = BitcoinECC.Bitcoin()
|
||||
return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
|
||||
if type(valid_address) is list: # Any address in the list
|
||||
return sign_address in valid_address
|
||||
else: # One possible address
|
||||
return sign_address == valid_address
|
||||
|
|
|
@ -2,23 +2,81 @@ import sys
|
|||
import logging
|
||||
import os
|
||||
import ssl
|
||||
import hashlib
|
||||
import random
|
||||
|
||||
from Config import config
|
||||
from util import SslPatch
|
||||
from util import helper
|
||||
|
||||
|
||||
class CryptConnectionManager:
|
||||
def __init__(self):
|
||||
# OpenSSL params
|
||||
if sys.platform.startswith("win"):
|
||||
self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe"
|
||||
if config.openssl_bin_file:
|
||||
self.openssl_bin = config.openssl_bin_file
|
||||
elif sys.platform.startswith("win"):
|
||||
self.openssl_bin = "tools\\openssl\\openssl.exe"
|
||||
elif config.dist_type.startswith("bundle_linux"):
|
||||
self.openssl_bin = "../runtime/bin/openssl"
|
||||
else:
|
||||
self.openssl_bin = "openssl"
|
||||
self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"}
|
||||
|
||||
self.context_client = None
|
||||
self.context_server = None
|
||||
|
||||
self.openssl_conf_template = "src/lib/openssl/openssl.cnf"
|
||||
self.openssl_conf = config.data_dir + "/openssl.cnf"
|
||||
|
||||
self.openssl_env = {
|
||||
"OPENSSL_CONF": self.openssl_conf,
|
||||
"RANDFILE": config.data_dir + "/openssl-rand.tmp"
|
||||
}
|
||||
|
||||
self.crypt_supported = [] # Supported cryptos
|
||||
|
||||
self.cacert_pem = config.data_dir + "/cacert-rsa.pem"
|
||||
self.cakey_pem = config.data_dir + "/cakey-rsa.pem"
|
||||
self.cert_pem = config.data_dir + "/cert-rsa.pem"
|
||||
self.cert_csr = config.data_dir + "/cert-rsa.csr"
|
||||
self.key_pem = config.data_dir + "/key-rsa.pem"
|
||||
|
||||
self.log = logging.getLogger("CryptConnectionManager")
|
||||
self.log.debug("Version: %s" % ssl.OPENSSL_VERSION)
|
||||
|
||||
self.fakedomains = [
|
||||
"yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com",
|
||||
"amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com",
|
||||
"godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp",
|
||||
"adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com",
|
||||
"networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com",
|
||||
"nazwa.pl", "symantec.com"
|
||||
]
|
||||
|
||||
def createSslContexts(self):
|
||||
if self.context_server and self.context_client:
|
||||
return False
|
||||
ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:"
|
||||
ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
|
||||
|
||||
if hasattr(ssl, "PROTOCOL_TLS"):
|
||||
protocol = ssl.PROTOCOL_TLS
|
||||
else:
|
||||
protocol = ssl.PROTOCOL_TLSv1_2
|
||||
self.context_client = ssl.SSLContext(protocol)
|
||||
self.context_client.check_hostname = False
|
||||
self.context_client.verify_mode = ssl.CERT_NONE
|
||||
|
||||
self.context_server = ssl.SSLContext(protocol)
|
||||
self.context_server.load_cert_chain(self.cert_pem, self.key_pem)
|
||||
|
||||
for ctx in (self.context_client, self.context_server):
|
||||
ctx.set_ciphers(ciphers)
|
||||
ctx.options |= ssl.OP_NO_COMPRESSION
|
||||
try:
|
||||
ctx.set_alpn_protocols(["h2", "http/1.1"])
|
||||
ctx.set_npn_protocols(["h2", "http/1.1"])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Select crypt that supported by both sides
|
||||
# Return: Name of the crypto
|
||||
def selectCrypt(self, client_supported):
|
||||
|
@ -29,21 +87,24 @@ class CryptConnectionManager:
|
|||
|
||||
# Wrap socket for crypt
|
||||
# Return: wrapped socket
|
||||
def wrapSocket(self, sock, crypt, server=False):
|
||||
def wrapSocket(self, sock, crypt, server=False, cert_pin=None):
|
||||
if crypt == "tls-rsa":
|
||||
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:"
|
||||
ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
|
||||
if server:
|
||||
return ssl.wrap_socket(
|
||||
sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir,
|
||||
certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
|
||||
sock_wrapped = self.context_server.wrap_socket(sock, server_side=True)
|
||||
else:
|
||||
return ssl.wrap_socket(sock, ciphers=ciphers)
|
||||
sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains))
|
||||
if cert_pin:
|
||||
cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest()
|
||||
if cert_hash != cert_pin:
|
||||
raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin))
|
||||
return sock_wrapped
|
||||
else:
|
||||
return sock
|
||||
|
||||
def removeCerts(self):
|
||||
for file_name in ["cert-rsa.pem", "key-rsa.pem"]:
|
||||
if config.keep_ssl_cert:
|
||||
return False
|
||||
for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]:
|
||||
file_path = "%s/%s" % (config.data_dir, file_name)
|
||||
if os.path.isfile(file_path):
|
||||
os.unlink(file_path)
|
||||
|
@ -53,69 +114,108 @@ class CryptConnectionManager:
|
|||
if config.disable_encryption:
|
||||
return False
|
||||
|
||||
if self.createSslRsaCert():
|
||||
if self.createSslRsaCert() and "tls-rsa" not in self.crypt_supported:
|
||||
self.crypt_supported.append("tls-rsa")
|
||||
|
||||
# Try to create RSA server cert + sign for connection encryption
|
||||
# Return: True on success
|
||||
def createSslRsaCert(self):
|
||||
import subprocess
|
||||
casubjects = [
|
||||
"/C=US/O=Amazon/OU=Server CA 1B/CN=Amazon",
|
||||
"/C=US/O=Let's Encrypt/CN=Let's Encrypt Authority X3",
|
||||
"/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA",
|
||||
"/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA"
|
||||
]
|
||||
self.openssl_env['CN'] = random.choice(self.fakedomains)
|
||||
environ = os.environ
|
||||
environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF']
|
||||
environ['RANDFILE'] = self.openssl_env['RANDFILE']
|
||||
environ['CN'] = self.openssl_env['CN']
|
||||
|
||||
if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
|
||||
if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
|
||||
self.createSslContexts()
|
||||
return True # Files already exits
|
||||
|
||||
proc = subprocess.Popen(
|
||||
"%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s -out %s -nodes -config %s" % helper.shellquote(
|
||||
self.openssl_bin,
|
||||
config.data_dir+"/key-rsa.pem",
|
||||
config.data_dir+"/cert-rsa.pem",
|
||||
self.openssl_env["OPENSSL_CONF"]
|
||||
),
|
||||
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
|
||||
)
|
||||
back = proc.stdout.read().strip()
|
||||
proc.wait()
|
||||
logging.debug("Generating RSA cert and key PEM files...%s" % back)
|
||||
|
||||
if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
|
||||
return True
|
||||
else:
|
||||
logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.")
|
||||
return False
|
||||
|
||||
# Not used yet: Missing on some platform
|
||||
"""def createSslEccCert(self):
|
||||
return False
|
||||
import subprocess
|
||||
|
||||
# Create ECC privatekey
|
||||
proc = subprocess.Popen(
|
||||
"%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir),
|
||||
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
|
||||
)
|
||||
back = proc.stdout.read().strip()
|
||||
proc.wait()
|
||||
self.log.debug("Generating ECC privatekey PEM file...%s" % back)
|
||||
# Replace variables in config template
|
||||
conf_template = open(self.openssl_conf_template).read()
|
||||
conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN'])
|
||||
open(self.openssl_conf, "w").write(conf_template)
|
||||
|
||||
# Create ECC cert
|
||||
proc = subprocess.Popen(
|
||||
"%s req -new -key %s -x509 -nodes -out %s -config %s" % helper.shellquote(
|
||||
self.openssl_bin,
|
||||
config.data_dir+"/key-ecc.pem",
|
||||
config.data_dir+"/cert-ecc.pem",
|
||||
self.openssl_env["OPENSSL_CONF"]
|
||||
),
|
||||
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
|
||||
# Generate CAcert and CAkey
|
||||
cmd_params = helper.shellquote(
|
||||
self.openssl_bin,
|
||||
self.openssl_conf,
|
||||
random.choice(casubjects),
|
||||
self.cakey_pem,
|
||||
self.cacert_pem
|
||||
)
|
||||
back = proc.stdout.read().strip()
|
||||
cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params
|
||||
self.log.debug("Generating RSA CAcert and CAkey PEM files...")
|
||||
self.log.debug("Running: %s" % cmd)
|
||||
proc = subprocess.Popen(
|
||||
cmd, shell=True, stderr=subprocess.STDOUT,
|
||||
stdout=subprocess.PIPE, env=environ
|
||||
)
|
||||
back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
|
||||
proc.wait()
|
||||
self.log.debug("Generating ECC cert PEM file...%s" % back)
|
||||
|
||||
if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir):
|
||||
if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)):
|
||||
self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. (%s)" % back)
|
||||
return False
|
||||
else:
|
||||
self.log.debug("Result: %s" % back)
|
||||
|
||||
# Generate certificate key and signing request
|
||||
cmd_params = helper.shellquote(
|
||||
self.openssl_bin,
|
||||
self.key_pem,
|
||||
self.cert_csr,
|
||||
"/CN=" + self.openssl_env['CN'],
|
||||
self.openssl_conf,
|
||||
)
|
||||
cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params
|
||||
self.log.debug("Generating certificate key and signing request...")
|
||||
proc = subprocess.Popen(
|
||||
cmd, shell=True, stderr=subprocess.STDOUT,
|
||||
stdout=subprocess.PIPE, env=environ
|
||||
)
|
||||
back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
|
||||
proc.wait()
|
||||
self.log.debug("Running: %s\n%s" % (cmd, back))
|
||||
|
||||
# Sign request and generate certificate
|
||||
cmd_params = helper.shellquote(
|
||||
self.openssl_bin,
|
||||
self.cert_csr,
|
||||
self.cacert_pem,
|
||||
self.cakey_pem,
|
||||
self.cert_pem,
|
||||
self.openssl_conf
|
||||
)
|
||||
cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params
|
||||
self.log.debug("Generating RSA cert...")
|
||||
proc = subprocess.Popen(
|
||||
cmd, shell=True, stderr=subprocess.STDOUT,
|
||||
stdout=subprocess.PIPE, env=environ
|
||||
)
|
||||
back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
|
||||
proc.wait()
|
||||
self.log.debug("Running: %s\n%s" % (cmd, back))
|
||||
|
||||
if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
|
||||
self.createSslContexts()
|
||||
|
||||
# Remove no longer necessary files
|
||||
os.unlink(self.openssl_conf)
|
||||
os.unlink(self.cacert_pem)
|
||||
os.unlink(self.cakey_pem)
|
||||
os.unlink(self.cert_csr)
|
||||
|
||||
return True
|
||||
else:
|
||||
self.logging.error("ECC SSL cert generation failed, cert or key files not exits.")
|
||||
return False
|
||||
"""
|
||||
self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.")
|
||||
|
||||
|
||||
manager = CryptConnectionManager()
|
||||
|
|
|
@ -3,46 +3,54 @@ import os
|
|||
import base64
|
||||
|
||||
|
||||
def sha1sum(file, blocksize=65536):
|
||||
if hasattr(file, "endswith"): # Its a string open it
|
||||
file = open(file, "rb")
|
||||
hash = hashlib.sha1()
|
||||
for block in iter(lambda: file.read(blocksize), ""):
|
||||
hash.update(block)
|
||||
return hash.hexdigest()
|
||||
|
||||
|
||||
def sha512sum(file, blocksize=65536):
|
||||
if hasattr(file, "endswith"): # Its a string open it
|
||||
def sha512sum(file, blocksize=65536, format="hexdigest"):
|
||||
if type(file) is str: # Filename specified
|
||||
file = open(file, "rb")
|
||||
hash = hashlib.sha512()
|
||||
for block in iter(lambda: file.read(blocksize), ""):
|
||||
for block in iter(lambda: file.read(blocksize), b""):
|
||||
hash.update(block)
|
||||
return hash.hexdigest()[0:64] # Truncate to 256bits is good enough
|
||||
|
||||
# Truncate to 256bits is good enough
|
||||
if format == "hexdigest":
|
||||
return hash.hexdigest()[0:64]
|
||||
else:
|
||||
return hash.digest()[0:32]
|
||||
|
||||
|
||||
def sha256sum(file, blocksize=65536):
|
||||
if type(file) is str: # Filename specified
|
||||
file = open(file, "rb")
|
||||
hash = hashlib.sha256()
|
||||
for block in iter(lambda: file.read(blocksize), b""):
|
||||
hash.update(block)
|
||||
return hash.hexdigest()
|
||||
|
||||
|
||||
def random(length=64, encoding="hex"):
|
||||
if encoding == "base64": # Characters: A-Za-z0-9
|
||||
hash = hashlib.sha512(os.urandom(256)).digest()
|
||||
return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length]
|
||||
return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
|
||||
else: # Characters: a-f0-9 (faster)
|
||||
return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
|
||||
|
||||
|
||||
# Sha512 truncated to 256bits
|
||||
class Sha512t:
|
||||
def __init__(self, data):
|
||||
if data:
|
||||
self.sha512 = hashlib.sha512(data)
|
||||
else:
|
||||
self.sha512 = hashlib.sha512()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import cStringIO as StringIO
|
||||
a = StringIO.StringIO()
|
||||
a.write("hello!")
|
||||
a.seek(0)
|
||||
print hashlib.sha1("hello!").hexdigest()
|
||||
print sha1sum(a)
|
||||
def hexdigest(self):
|
||||
return self.sha512.hexdigest()[0:64]
|
||||
|
||||
import time
|
||||
s = time.time()
|
||||
print sha1sum(open("F:\\Temp\\bigfile")),
|
||||
print time.time() - s
|
||||
def digest(self):
|
||||
return self.sha512.digest()[0:32]
|
||||
|
||||
s = time.time()
|
||||
print sha512sum(open("F:\\Temp\\bigfile")),
|
||||
print time.time() - s
|
||||
def update(self, data):
|
||||
return self.sha512.update(data)
|
||||
|
||||
|
||||
def sha512t(data=None):
|
||||
return Sha512t(data)
|
||||
|
|
85
src/Crypt/CryptTor.py
Normal file
85
src/Crypt/CryptTor.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
import base64
|
||||
import hashlib
|
||||
|
||||
def sign(data, privatekey):
|
||||
import rsa
|
||||
from rsa import pkcs1
|
||||
from lib import Ed25519
|
||||
|
||||
## Onion Service V3
|
||||
if len(privatekey) == 88:
|
||||
prv_key = base64.b64decode(privatekey)
|
||||
pub_key = Ed25519.publickey_unsafe(prv_key)
|
||||
sign = Ed25519.signature_unsafe(data, prv_key, pub_key)
|
||||
|
||||
return sign
|
||||
|
||||
## Onion Service V2
|
||||
if "BEGIN RSA PRIVATE KEY" not in privatekey:
|
||||
privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey
|
||||
|
||||
priv = rsa.PrivateKey.load_pkcs1(privatekey)
|
||||
sign = rsa.pkcs1.sign(data, priv, 'SHA-256')
|
||||
return sign
|
||||
|
||||
def verify(data, publickey, sign):
|
||||
import rsa
|
||||
from rsa import pkcs1
|
||||
from lib import Ed25519
|
||||
|
||||
## Onion Service V3
|
||||
if len(publickey) == 32:
|
||||
|
||||
try:
|
||||
valid = Ed25519.checkvalid(sign, data, publickey)
|
||||
valid = 'SHA-256'
|
||||
|
||||
except Exception as err:
|
||||
print(err)
|
||||
valid = False
|
||||
|
||||
return valid
|
||||
|
||||
## Onion Service V2
|
||||
pub = rsa.PublicKey.load_pkcs1(publickey, format="DER")
|
||||
|
||||
try:
|
||||
valid = rsa.pkcs1.verify(data, sign, pub)
|
||||
|
||||
except pkcs1.VerificationError:
|
||||
valid = False
|
||||
|
||||
return valid
|
||||
|
||||
def privatekeyToPublickey(privatekey):
|
||||
import rsa
|
||||
from rsa import pkcs1
|
||||
from lib import Ed25519
|
||||
|
||||
## Onion Service V3
|
||||
if len(privatekey) == 88:
|
||||
prv_key = base64.b64decode(privatekey)
|
||||
pub_key = Ed25519.publickey_unsafe(prv_key)
|
||||
|
||||
return pub_key
|
||||
|
||||
## Onion Service V2
|
||||
if "BEGIN RSA PRIVATE KEY" not in privatekey:
|
||||
privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey
|
||||
|
||||
priv = rsa.PrivateKey.load_pkcs1(privatekey)
|
||||
pub = rsa.PublicKey(priv.n, priv.e)
|
||||
|
||||
return pub.save_pkcs1("DER")
|
||||
|
||||
def publickeyToOnion(publickey):
|
||||
from lib import Ed25519
|
||||
|
||||
## Onion Service V3
|
||||
if len(publickey) == 32:
|
||||
addr = Ed25519.publickey_to_onionaddress(publickey)[:-6]
|
||||
|
||||
return addr
|
||||
|
||||
## Onion Service V2
|
||||
return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
|
420
src/Db/Db.py
420
src/Db/Db.py
|
@ -4,10 +4,24 @@ import time
|
|||
import logging
|
||||
import re
|
||||
import os
|
||||
import atexit
|
||||
import threading
|
||||
import sys
|
||||
import weakref
|
||||
import errno
|
||||
|
||||
import gevent
|
||||
|
||||
from DbCursor import DbCursor
|
||||
from Debug import Debug
|
||||
from .DbCursor import DbCursor
|
||||
from util import SafeRe
|
||||
from util import helper
|
||||
from util import ThreadPool
|
||||
from Config import config
|
||||
|
||||
thread_pool_db = ThreadPool.ThreadPool(config.threads_db)
|
||||
|
||||
next_db_id = 0
|
||||
opened_dbs = []
|
||||
|
||||
|
||||
|
@ -16,90 +30,242 @@ def dbCleanup():
|
|||
while 1:
|
||||
time.sleep(60 * 5)
|
||||
for db in opened_dbs[:]:
|
||||
if time.time() - db.last_query_time > 60 * 3:
|
||||
db.close()
|
||||
idle = time.time() - db.last_query_time
|
||||
if idle > 60 * 5 and db.close_idle:
|
||||
db.close("Cleanup")
|
||||
|
||||
|
||||
def dbCommitCheck():
|
||||
while 1:
|
||||
time.sleep(5)
|
||||
for db in opened_dbs[:]:
|
||||
if not db.need_commit:
|
||||
continue
|
||||
|
||||
success = db.commit("Interval")
|
||||
if success:
|
||||
db.need_commit = False
|
||||
time.sleep(0.1)
|
||||
|
||||
|
||||
def dbCloseAll():
|
||||
for db in opened_dbs[:]:
|
||||
db.close("Close all")
|
||||
|
||||
|
||||
gevent.spawn(dbCleanup)
|
||||
gevent.spawn(dbCommitCheck)
|
||||
atexit.register(dbCloseAll)
|
||||
|
||||
|
||||
class Db:
|
||||
class DbTableError(Exception):
|
||||
def __init__(self, message, table):
|
||||
super().__init__(message)
|
||||
self.table = table
|
||||
|
||||
def __init__(self, schema, db_path):
|
||||
|
||||
class Db(object):
|
||||
|
||||
def __init__(self, schema, db_path, close_idle=False):
|
||||
global next_db_id
|
||||
self.db_path = db_path
|
||||
self.db_dir = os.path.dirname(db_path) + "/"
|
||||
self.schema = schema
|
||||
self.schema["version"] = self.schema.get("version", 1)
|
||||
self.conn = None
|
||||
self.cur = None
|
||||
self.log = logging.getLogger("Db:%s" % schema["db_name"])
|
||||
self.cursors = weakref.WeakSet()
|
||||
self.id = next_db_id
|
||||
next_db_id += 1
|
||||
self.progress_sleeping = False
|
||||
self.commiting = False
|
||||
self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"]))
|
||||
self.table_names = None
|
||||
self.collect_stats = False
|
||||
self.foreign_keys = False
|
||||
self.need_commit = False
|
||||
self.query_stats = {}
|
||||
self.db_keyvalues = {}
|
||||
self.delayed_queue = []
|
||||
self.delayed_queue_thread = None
|
||||
self.close_idle = close_idle
|
||||
self.last_query_time = time.time()
|
||||
self.last_sleep_time = time.time()
|
||||
self.num_execute_since_sleep = 0
|
||||
self.lock = ThreadPool.Lock()
|
||||
self.connect_lock = ThreadPool.Lock()
|
||||
|
||||
def __repr__(self):
|
||||
return "<Db:%s>" % self.db_path
|
||||
return "<Db#%s:%s close_idle:%s>" % (id(self), self.db_path, self.close_idle)
|
||||
|
||||
def connect(self):
|
||||
if self not in opened_dbs:
|
||||
opened_dbs.append(self)
|
||||
self.connect_lock.acquire(True)
|
||||
try:
|
||||
if self.conn:
|
||||
self.log.debug("Already connected, connection ignored")
|
||||
return
|
||||
|
||||
self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
|
||||
if not os.path.isdir(self.db_dir): # Directory not exist yet
|
||||
os.makedirs(self.db_dir)
|
||||
self.log.debug("Created Db path: %s" % self.db_dir)
|
||||
if not os.path.isfile(self.db_path):
|
||||
self.log.debug("Db file not exist yet: %s" % self.db_path)
|
||||
self.conn = sqlite3.connect(self.db_path)
|
||||
self.conn.row_factory = sqlite3.Row
|
||||
self.conn.isolation_level = None
|
||||
self.cur = self.getCursor()
|
||||
# We need more speed then security
|
||||
self.cur.execute("PRAGMA journal_mode = WAL")
|
||||
self.cur.execute("PRAGMA journal_mode = MEMORY")
|
||||
self.cur.execute("PRAGMA synchronous = OFF")
|
||||
if self not in opened_dbs:
|
||||
opened_dbs.append(self)
|
||||
s = time.time()
|
||||
try: # Directory not exist yet
|
||||
os.makedirs(self.db_dir)
|
||||
self.log.debug("Created Db path: %s" % self.db_dir)
|
||||
except OSError as err:
|
||||
if err.errno != errno.EEXIST:
|
||||
raise err
|
||||
if not os.path.isfile(self.db_path):
|
||||
self.log.debug("Db file not exist yet: %s" % self.db_path)
|
||||
self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False)
|
||||
self.conn.row_factory = sqlite3.Row
|
||||
self.conn.set_progress_handler(self.progress, 5000000)
|
||||
self.conn.execute('PRAGMA journal_mode=WAL')
|
||||
if self.foreign_keys:
|
||||
self.conn.execute("PRAGMA foreign_keys = ON")
|
||||
self.cur = self.getCursor()
|
||||
|
||||
self.log.debug(
|
||||
"Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." %
|
||||
(self.db_path, time.time() - s, len(opened_dbs), sqlite3.version)
|
||||
)
|
||||
self.log.debug("Connect by thread: %s" % threading.current_thread().ident)
|
||||
self.log.debug("Connect called by %s" % Debug.formatStack())
|
||||
finally:
|
||||
self.connect_lock.release()
|
||||
|
||||
def getConn(self):
|
||||
if not self.conn:
|
||||
self.connect()
|
||||
return self.conn
|
||||
|
||||
def progress(self, *args, **kwargs):
|
||||
self.progress_sleeping = True
|
||||
time.sleep(0.001)
|
||||
self.progress_sleeping = False
|
||||
|
||||
# Execute query using dbcursor
|
||||
def execute(self, query, params=None):
|
||||
self.last_query_time = time.time()
|
||||
if not self.conn:
|
||||
self.connect()
|
||||
return self.cur.execute(query, params)
|
||||
|
||||
def close(self):
|
||||
self.log.debug("Closing, opened: %s" % opened_dbs)
|
||||
@thread_pool_db.wrap
|
||||
def commit(self, reason="Unknown"):
|
||||
if self.progress_sleeping:
|
||||
self.log.debug("Commit ignored: Progress sleeping")
|
||||
return False
|
||||
|
||||
if not self.conn:
|
||||
self.log.debug("Commit ignored: No connection")
|
||||
return False
|
||||
|
||||
if self.commiting:
|
||||
self.log.debug("Commit ignored: Already commiting")
|
||||
return False
|
||||
|
||||
try:
|
||||
s = time.time()
|
||||
self.commiting = True
|
||||
self.conn.commit()
|
||||
self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason))
|
||||
return True
|
||||
except Exception as err:
|
||||
if "SQL statements in progress" in str(err):
|
||||
self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason))
|
||||
else:
|
||||
self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason))
|
||||
return False
|
||||
finally:
|
||||
self.commiting = False
|
||||
|
||||
def insertOrUpdate(self, *args, **kwargs):
|
||||
if not self.conn:
|
||||
self.connect()
|
||||
return self.cur.insertOrUpdate(*args, **kwargs)
|
||||
|
||||
def executeDelayed(self, *args, **kwargs):
|
||||
if not self.delayed_queue_thread:
|
||||
self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed)
|
||||
self.delayed_queue.append(("execute", (args, kwargs)))
|
||||
|
||||
def insertOrUpdateDelayed(self, *args, **kwargs):
|
||||
if not self.delayed_queue:
|
||||
gevent.spawn_later(1, self.processDelayed)
|
||||
self.delayed_queue.append(("insertOrUpdate", (args, kwargs)))
|
||||
|
||||
def processDelayed(self):
|
||||
if not self.delayed_queue:
|
||||
self.log.debug("processDelayed aborted")
|
||||
return
|
||||
if not self.conn:
|
||||
self.connect()
|
||||
|
||||
s = time.time()
|
||||
cur = self.getCursor()
|
||||
for command, params in self.delayed_queue:
|
||||
if command == "insertOrUpdate":
|
||||
cur.insertOrUpdate(*params[0], **params[1])
|
||||
else:
|
||||
cur.execute(*params[0], **params[1])
|
||||
|
||||
if len(self.delayed_queue) > 10:
|
||||
self.log.debug("Processed %s delayed queue in %.3fs" % (len(self.delayed_queue), time.time() - s))
|
||||
self.delayed_queue = []
|
||||
self.delayed_queue_thread = None
|
||||
|
||||
def close(self, reason="Unknown"):
|
||||
if not self.conn:
|
||||
return False
|
||||
self.connect_lock.acquire()
|
||||
s = time.time()
|
||||
if self.delayed_queue:
|
||||
self.processDelayed()
|
||||
if self in opened_dbs:
|
||||
opened_dbs.remove(self)
|
||||
self.need_commit = False
|
||||
self.commit("Closing: %s" % reason)
|
||||
self.log.debug("Close called by %s" % Debug.formatStack())
|
||||
for i in range(5):
|
||||
if len(self.cursors) == 0:
|
||||
break
|
||||
self.log.debug("Pending cursors: %s" % len(self.cursors))
|
||||
time.sleep(0.1 * i)
|
||||
if len(self.cursors):
|
||||
self.log.debug("Killing cursors: %s" % len(self.cursors))
|
||||
self.conn.interrupt()
|
||||
|
||||
if self.cur:
|
||||
self.cur.close()
|
||||
if self.conn:
|
||||
self.conn.close()
|
||||
ThreadPool.main_loop.call(self.conn.close)
|
||||
self.conn = None
|
||||
self.cur = None
|
||||
self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs)))
|
||||
self.connect_lock.release()
|
||||
return True
|
||||
|
||||
# Gets a cursor object to database
|
||||
# Return: Cursor class
|
||||
def getCursor(self):
|
||||
if not self.conn:
|
||||
self.connect()
|
||||
return DbCursor(self.conn, self)
|
||||
|
||||
cur = DbCursor(self)
|
||||
return cur
|
||||
|
||||
def getSharedCursor(self):
|
||||
if not self.conn:
|
||||
self.connect()
|
||||
return self.cur
|
||||
|
||||
# Get the table version
|
||||
# Return: Table version or None if not exist
|
||||
def getTableVersion(self, table_name):
|
||||
"""if not self.table_names: # Get existing table names
|
||||
res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
self.table_names = [row["name"] for row in res]
|
||||
if table_name not in self.table_names:
|
||||
return False
|
||||
|
||||
else:"""
|
||||
if not self.db_keyvalues: # Get db keyvalues
|
||||
try:
|
||||
res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
|
||||
except sqlite3.OperationalError, err: # Table not exist
|
||||
self.log.debug("Query error: %s" % err)
|
||||
res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
|
||||
except sqlite3.OperationalError as err: # Table not exist
|
||||
self.log.debug("Query table version error: %s" % err)
|
||||
return False
|
||||
|
||||
for row in res:
|
||||
|
@ -112,9 +278,8 @@ class Db:
|
|||
def checkTables(self):
|
||||
s = time.time()
|
||||
changed_tables = []
|
||||
cur = self.getCursor()
|
||||
|
||||
cur.execute("BEGIN")
|
||||
cur = self.getSharedCursor()
|
||||
|
||||
# Check internal tables
|
||||
# Check keyvalue table
|
||||
|
@ -122,85 +287,115 @@ class Db:
|
|||
["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
|
||||
["key", "TEXT"],
|
||||
["value", "INTEGER"],
|
||||
["json_id", "INTEGER REFERENCES json (json_id)"],
|
||||
["json_id", "INTEGER"],
|
||||
], [
|
||||
"CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
|
||||
], version=self.schema["version"])
|
||||
if changed:
|
||||
changed_tables.append("keyvalue")
|
||||
|
||||
# Check json table
|
||||
if self.schema["version"] == 1:
|
||||
changed = cur.needTable("json", [
|
||||
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
|
||||
["path", "VARCHAR(255)"]
|
||||
], [
|
||||
"CREATE UNIQUE INDEX path ON json(path)"
|
||||
], version=self.schema["version"])
|
||||
else:
|
||||
changed = cur.needTable("json", [
|
||||
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
|
||||
["directory", "VARCHAR(255)"],
|
||||
["file_name", "VARCHAR(255)"]
|
||||
], [
|
||||
"CREATE UNIQUE INDEX path ON json(directory, file_name)"
|
||||
], version=self.schema["version"])
|
||||
if changed:
|
||||
changed_tables.append("json")
|
||||
# Create json table if no custom one defined
|
||||
if "json" not in self.schema.get("tables", {}):
|
||||
if self.schema["version"] == 1:
|
||||
changed = cur.needTable("json", [
|
||||
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
|
||||
["path", "VARCHAR(255)"]
|
||||
], [
|
||||
"CREATE UNIQUE INDEX path ON json(path)"
|
||||
], version=self.schema["version"])
|
||||
elif self.schema["version"] == 2:
|
||||
changed = cur.needTable("json", [
|
||||
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
|
||||
["directory", "VARCHAR(255)"],
|
||||
["file_name", "VARCHAR(255)"]
|
||||
], [
|
||||
"CREATE UNIQUE INDEX path ON json(directory, file_name)"
|
||||
], version=self.schema["version"])
|
||||
elif self.schema["version"] == 3:
|
||||
changed = cur.needTable("json", [
|
||||
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
|
||||
["site", "VARCHAR(255)"],
|
||||
["directory", "VARCHAR(255)"],
|
||||
["file_name", "VARCHAR(255)"]
|
||||
], [
|
||||
"CREATE UNIQUE INDEX path ON json(directory, site, file_name)"
|
||||
], version=self.schema["version"])
|
||||
if changed:
|
||||
changed_tables.append("json")
|
||||
|
||||
# Check schema tables
|
||||
for table_name, table_settings in self.schema["tables"].items():
|
||||
changed = cur.needTable(
|
||||
table_name, table_settings["cols"],
|
||||
table_settings["indexes"], version=table_settings["schema_changed"]
|
||||
)
|
||||
if changed:
|
||||
changed_tables.append(table_name)
|
||||
for table_name, table_settings in self.schema.get("tables", {}).items():
|
||||
try:
|
||||
indexes = table_settings.get("indexes", [])
|
||||
version = table_settings.get("schema_changed", 0)
|
||||
changed = cur.needTable(
|
||||
table_name, table_settings["cols"],
|
||||
indexes, version=version
|
||||
)
|
||||
if changed:
|
||||
changed_tables.append(table_name)
|
||||
except Exception as err:
|
||||
self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err)))
|
||||
raise DbTableError(err, table_name)
|
||||
|
||||
cur.execute("COMMIT")
|
||||
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
|
||||
if changed_tables:
|
||||
self.db_keyvalues = {} # Refresh table version cache
|
||||
|
||||
return changed_tables
|
||||
|
||||
# Load json file to db
|
||||
# Update json file to db
|
||||
# Return: True if matched
|
||||
def loadJson(self, file_path, file=None, cur=None):
|
||||
def updateJson(self, file_path, file=None, cur=None):
|
||||
if not file_path.startswith(self.db_dir):
|
||||
return False # Not from the db dir: Skipping
|
||||
relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file
|
||||
relative_path = file_path[len(self.db_dir):] # File path realative to db file
|
||||
|
||||
# Check if filename matches any of mappings in schema
|
||||
matched_maps = []
|
||||
for match, map_settings in self.schema["maps"].items():
|
||||
if re.match(match, relative_path):
|
||||
matched_maps.append(map_settings)
|
||||
try:
|
||||
if SafeRe.match(match, relative_path):
|
||||
matched_maps.append(map_settings)
|
||||
except SafeRe.UnsafePatternError as err:
|
||||
self.log.error(err)
|
||||
|
||||
# No match found for the file
|
||||
if not matched_maps:
|
||||
return False
|
||||
|
||||
# Load the json file
|
||||
if not file:
|
||||
file = open(file_path)
|
||||
data = json.load(file)
|
||||
try:
|
||||
if file is None: # Open file is not file object passed
|
||||
file = open(file_path, "rb")
|
||||
|
||||
if file is False: # File deleted
|
||||
data = {}
|
||||
else:
|
||||
if file_path.endswith("json.gz"):
|
||||
file = helper.limitedGzipFile(fileobj=file)
|
||||
|
||||
if sys.version_info.major == 3 and sys.version_info.minor < 6:
|
||||
data = json.loads(file.read().decode("utf8"))
|
||||
else:
|
||||
data = json.load(file)
|
||||
except Exception as err:
|
||||
self.log.debug("Json file %s load error: %s" % (file_path, err))
|
||||
data = {}
|
||||
|
||||
# No cursor specificed
|
||||
if not cur:
|
||||
cur = self.getCursor()
|
||||
cur.execute("BEGIN")
|
||||
cur = self.getSharedCursor()
|
||||
cur.logging = False
|
||||
commit_after_done = True
|
||||
else:
|
||||
commit_after_done = False
|
||||
|
||||
# Row for current json file
|
||||
json_row = cur.getJsonRow(relative_path)
|
||||
# Row for current json file if required
|
||||
if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
|
||||
json_row = cur.getJsonRow(relative_path)
|
||||
|
||||
# Check matched mappings in schema
|
||||
for map in matched_maps:
|
||||
for dbmap in matched_maps:
|
||||
# Insert non-relational key values
|
||||
if map.get("to_keyvalue"):
|
||||
if dbmap.get("to_keyvalue"):
|
||||
# Get current values
|
||||
res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],))
|
||||
current_keyvalue = {}
|
||||
|
@ -209,7 +404,7 @@ class Db:
|
|||
current_keyvalue[row["key"]] = row["value"]
|
||||
current_keyvalue_id[row["key"]] = row["keyvalue_id"]
|
||||
|
||||
for key in map["to_keyvalue"]:
|
||||
for key in dbmap["to_keyvalue"]:
|
||||
if key not in current_keyvalue: # Keyvalue not exist yet in the db
|
||||
cur.execute(
|
||||
"INSERT INTO keyvalue ?",
|
||||
|
@ -221,15 +416,21 @@ class Db:
|
|||
(data.get(key), current_keyvalue_id[key])
|
||||
)
|
||||
|
||||
"""
|
||||
for key in map.get("to_keyvalue", []):
|
||||
cur.execute("INSERT OR REPLACE INTO keyvalue ?",
|
||||
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
|
||||
)
|
||||
"""
|
||||
# Insert data to json table for easier joins
|
||||
if dbmap.get("to_json_table"):
|
||||
directory, file_name = re.match("^(.*?)/*([^/]*)$", relative_path).groups()
|
||||
data_json_row = dict(cur.getJsonRow(directory + "/" + dbmap.get("file_name", file_name)))
|
||||
changed = False
|
||||
for key in dbmap["to_json_table"]:
|
||||
if data.get(key) != data_json_row.get(key):
|
||||
changed = True
|
||||
if changed:
|
||||
# Add the custom col values
|
||||
data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]})
|
||||
cur.execute("INSERT OR REPLACE INTO json ?", data_json_row)
|
||||
|
||||
# Insert data to tables
|
||||
for table_settings in map.get("to_table", []):
|
||||
for table_settings in dbmap.get("to_table", []):
|
||||
if isinstance(table_settings, dict): # Custom settings
|
||||
table_name = table_settings["table"] # Table name to insert datas
|
||||
node = table_settings.get("node", table_name) # Node keyname in data json file
|
||||
|
@ -245,34 +446,38 @@ class Db:
|
|||
import_cols = None
|
||||
replaces = None
|
||||
|
||||
# Fill import cols from table cols
|
||||
if not import_cols:
|
||||
import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]])
|
||||
|
||||
cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
|
||||
|
||||
if node not in data:
|
||||
continue
|
||||
|
||||
if key_col: # Map as dict
|
||||
for key, val in data[node].iteritems():
|
||||
for key, val in data[node].items():
|
||||
if val_col: # Single value
|
||||
cur.execute(
|
||||
"INSERT OR REPLACE INTO %s ?" % table_name,
|
||||
{key_col: key, val_col: val, "json_id": json_row["json_id"]}
|
||||
)
|
||||
else: # Multi value
|
||||
if isinstance(val, dict): # Single row
|
||||
if type(val) is dict: # Single row
|
||||
row = val
|
||||
if import_cols:
|
||||
row = {key: row[key] for key in import_cols} # Filter row by import_cols
|
||||
row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols
|
||||
row[key_col] = key
|
||||
# Replace in value if necessary
|
||||
if replaces:
|
||||
for replace_key, replace in replaces.iteritems():
|
||||
for replace_key, replace in replaces.items():
|
||||
if replace_key in row:
|
||||
for replace_from, replace_to in replace.iteritems():
|
||||
for replace_from, replace_to in replace.items():
|
||||
row[replace_key] = row[replace_key].replace(replace_from, replace_to)
|
||||
|
||||
row["json_id"] = json_row["json_id"]
|
||||
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
|
||||
else: # Multi row
|
||||
elif type(val) is list: # Multi row
|
||||
for row in val:
|
||||
row[key_col] = key
|
||||
row["json_id"] = json_row["json_id"]
|
||||
|
@ -280,10 +485,15 @@ class Db:
|
|||
else: # Map as list
|
||||
for row in data[node]:
|
||||
row["json_id"] = json_row["json_id"]
|
||||
if import_cols:
|
||||
row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols
|
||||
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
|
||||
|
||||
if commit_after_done:
|
||||
cur.execute("COMMIT")
|
||||
# Cleanup json row
|
||||
if not data:
|
||||
self.log.debug("Cleanup json row for %s" % file_path)
|
||||
cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"])
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
@ -297,15 +507,13 @@ if __name__ == "__main__":
|
|||
dbjson.collect_stats = True
|
||||
dbjson.checkTables()
|
||||
cur = dbjson.getCursor()
|
||||
cur.execute("BEGIN")
|
||||
cur.logging = False
|
||||
dbjson.loadJson("data/users/content.json", cur=cur)
|
||||
dbjson.updateJson("data/users/content.json", cur=cur)
|
||||
for user_dir in os.listdir("data/users"):
|
||||
if os.path.isdir("data/users/%s" % user_dir):
|
||||
dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur)
|
||||
dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur)
|
||||
# print ".",
|
||||
cur.logging = True
|
||||
cur.execute("COMMIT")
|
||||
print "Done in %.3fs" % (time.time() - s)
|
||||
print("Done in %.3fs" % (time.time() - s))
|
||||
for query, stats in sorted(dbjson.query_stats.items()):
|
||||
print "-", query, stats
|
||||
print("-", query, stats)
|
||||
|
|
|
@ -1,51 +1,119 @@
|
|||
import time
|
||||
import re
|
||||
from util import helper
|
||||
|
||||
# Special sqlite cursor
|
||||
|
||||
|
||||
class DbCursor:
|
||||
|
||||
def __init__(self, conn, db):
|
||||
self.conn = conn
|
||||
def __init__(self, db):
|
||||
self.db = db
|
||||
self.cursor = conn.cursor()
|
||||
self.logging = False
|
||||
|
||||
def execute(self, query, params=None):
|
||||
if isinstance(params, dict): # Make easier select and insert by allowing dict params
|
||||
if query.startswith("SELECT") or query.startswith("DELETE"):
|
||||
def quoteValue(self, value):
|
||||
if type(value) is int:
|
||||
return str(value)
|
||||
else:
|
||||
return "'%s'" % value.replace("'", "''")
|
||||
|
||||
def parseQuery(self, query, params):
|
||||
query_type = query.split(" ", 1)[0].upper()
|
||||
if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params
|
||||
if query_type in ("SELECT", "DELETE", "UPDATE"):
|
||||
# Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? format
|
||||
query_wheres = []
|
||||
values = []
|
||||
for key, value in params.items():
|
||||
if type(value) is list:
|
||||
query_wheres.append(key+" IN ("+",".join(["?"]*len(value))+")")
|
||||
values += value
|
||||
if key.startswith("not__"):
|
||||
field = key.replace("not__", "")
|
||||
operator = "NOT IN"
|
||||
else:
|
||||
field = key
|
||||
operator = "IN"
|
||||
if len(value) > 100:
|
||||
# Embed values in query to avoid "too many SQL variables" error
|
||||
query_values = ",".join(map(helper.sqlquote, value))
|
||||
else:
|
||||
query_values = ",".join(["?"] * len(value))
|
||||
values += value
|
||||
query_wheres.append(
|
||||
"%s %s (%s)" %
|
||||
(field, operator, query_values)
|
||||
)
|
||||
else:
|
||||
query_wheres.append(key+" = ?")
|
||||
if key.startswith("not__"):
|
||||
query_wheres.append(key.replace("not__", "") + " != ?")
|
||||
elif key.endswith("__like"):
|
||||
query_wheres.append(key.replace("__like", "") + " LIKE ?")
|
||||
elif key.endswith(">"):
|
||||
query_wheres.append(key.replace(">", "") + " > ?")
|
||||
elif key.endswith("<"):
|
||||
query_wheres.append(key.replace("<", "") + " < ?")
|
||||
else:
|
||||
query_wheres.append(key + " = ?")
|
||||
values.append(value)
|
||||
wheres = " AND ".join(query_wheres)
|
||||
query = query.replace("?", wheres)
|
||||
if wheres == "":
|
||||
wheres = "1"
|
||||
query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ?
|
||||
params = values
|
||||
else:
|
||||
# Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
|
||||
keys = ", ".join(params.keys())
|
||||
values = ", ".join(['?' for key in params.keys()])
|
||||
query = query.replace("?", "(%s) VALUES (%s)" % (keys, values))
|
||||
keysvalues = "(%s) VALUES (%s)" % (keys, values)
|
||||
query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ?
|
||||
params = tuple(params.values())
|
||||
elif isinstance(params, dict) and ":" in query:
|
||||
new_params = dict()
|
||||
values = []
|
||||
for key, value in params.items():
|
||||
if type(value) is list:
|
||||
for idx, val in enumerate(value):
|
||||
new_params[key + "__" + str(idx)] = val
|
||||
|
||||
s = time.time()
|
||||
# if query == "COMMIT": self.logging = True # Turn logging back on transaction commit
|
||||
new_names = [":" + key + "__" + str(idx) for idx in range(len(value))]
|
||||
query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query)
|
||||
else:
|
||||
new_params[key] = value
|
||||
|
||||
if params: # Query has parameters
|
||||
res = self.cursor.execute(query, params)
|
||||
if self.logging:
|
||||
self.db.log.debug((query.replace("?", "%s") % params) + " (Done in %.4f)" % (time.time() - s))
|
||||
else:
|
||||
res = self.cursor.execute(query)
|
||||
if self.logging:
|
||||
self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s))
|
||||
params = new_params
|
||||
return query, params
|
||||
|
||||
def execute(self, query, params=None):
|
||||
query = query.strip()
|
||||
while self.db.progress_sleeping or self.db.commiting:
|
||||
time.sleep(0.1)
|
||||
|
||||
self.db.last_query_time = time.time()
|
||||
|
||||
query, params = self.parseQuery(query, params)
|
||||
|
||||
cursor = self.db.getConn().cursor()
|
||||
self.db.cursors.add(cursor)
|
||||
if self.db.lock.locked():
|
||||
self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock))
|
||||
|
||||
try:
|
||||
s = time.time()
|
||||
self.db.lock.acquire(True)
|
||||
if query.upper().strip("; ") == "VACUUM":
|
||||
self.db.commit("vacuum called")
|
||||
if params:
|
||||
res = cursor.execute(query, params)
|
||||
else:
|
||||
res = cursor.execute(query)
|
||||
finally:
|
||||
self.db.lock.release()
|
||||
|
||||
taken_query = time.time() - s
|
||||
if self.logging or taken_query > 1:
|
||||
if params: # Query has parameters
|
||||
self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s))
|
||||
else:
|
||||
self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s))
|
||||
|
||||
# Log query stats
|
||||
if self.db.collect_stats:
|
||||
|
@ -54,23 +122,59 @@ class DbCursor:
|
|||
self.db.query_stats[query]["call"] += 1
|
||||
self.db.query_stats[query]["time"] += time.time() - s
|
||||
|
||||
# if query == "BEGIN": self.logging = False # Turn logging off on transaction commit
|
||||
return res
|
||||
query_type = query.split(" ", 1)[0].upper()
|
||||
is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"]
|
||||
if not self.db.need_commit and is_update_query:
|
||||
self.db.need_commit = True
|
||||
|
||||
if is_update_query:
|
||||
return cursor
|
||||
else:
|
||||
return res
|
||||
|
||||
def executemany(self, query, params):
|
||||
while self.db.progress_sleeping or self.db.commiting:
|
||||
time.sleep(0.1)
|
||||
|
||||
self.db.last_query_time = time.time()
|
||||
|
||||
s = time.time()
|
||||
cursor = self.db.getConn().cursor()
|
||||
self.db.cursors.add(cursor)
|
||||
|
||||
try:
|
||||
self.db.lock.acquire(True)
|
||||
cursor.executemany(query, params)
|
||||
finally:
|
||||
self.db.lock.release()
|
||||
|
||||
taken_query = time.time() - s
|
||||
if self.logging or taken_query > 0.1:
|
||||
self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query))
|
||||
|
||||
self.db.need_commit = True
|
||||
|
||||
return cursor
|
||||
|
||||
# Creates on updates a database row without incrementing the rowid
|
||||
def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}):
|
||||
sql_sets = ["%s = :%s" % (key, key) for key in query_sets.keys()]
|
||||
sql_wheres = ["%s = :%s" % (key, key) for key in query_wheres.keys()]
|
||||
|
||||
params = query_sets
|
||||
params.update(query_wheres)
|
||||
res = self.execute(
|
||||
"UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)),
|
||||
params
|
||||
)
|
||||
if res.rowcount == 0:
|
||||
params.update(oninsert) # Add insert-only fields
|
||||
self.execute("INSERT INTO %s ?" % table, params)
|
||||
|
||||
# Create new table
|
||||
# Return: True on success
|
||||
def createTable(self, table, cols):
|
||||
# TODO: Check current structure
|
||||
"""table_changed = False
|
||||
res = c.execute("PRAGMA table_info(%s)" % table)
|
||||
if res:
|
||||
for row in res:
|
||||
print row["name"], row["type"], cols[row["name"]]
|
||||
print row
|
||||
else:
|
||||
table_changed = True
|
||||
|
||||
if table_changed: # Table structure changed, drop and create again"""
|
||||
self.execute("DROP TABLE IF EXISTS %s" % table)
|
||||
col_definitions = []
|
||||
for col_name, col_type in cols:
|
||||
|
@ -82,8 +186,10 @@ class DbCursor:
|
|||
# Create indexes on table
|
||||
# Return: True on success
|
||||
def createIndexes(self, table, indexes):
|
||||
# indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index
|
||||
for index in indexes:
|
||||
if not index.strip().upper().startswith("CREATE"):
|
||||
self.db.log.error("Index command should start with CREATE: %s" % index)
|
||||
continue
|
||||
self.execute(index)
|
||||
|
||||
# Create table if not exist
|
||||
|
@ -91,7 +197,7 @@ class DbCursor:
|
|||
def needTable(self, table, cols, indexes=None, version=1):
|
||||
current_version = self.db.getTableVersion(table)
|
||||
if int(current_version) < int(version): # Table need update or not extis
|
||||
self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
|
||||
self.db.log.debug("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
|
||||
self.createTable(table, cols)
|
||||
if indexes:
|
||||
self.createIndexes(table, indexes)
|
||||
|
@ -108,20 +214,33 @@ class DbCursor:
|
|||
def getJsonRow(self, file_path):
|
||||
directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
|
||||
if self.db.schema["version"] == 1:
|
||||
# One path field
|
||||
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
|
||||
row = res.fetchone()
|
||||
if not row: # No row yet, create it
|
||||
self.execute("INSERT INTO json ?", {"path": file_path})
|
||||
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
|
||||
row = res.fetchone()
|
||||
else:
|
||||
elif self.db.schema["version"] == 2:
|
||||
# Separate directory, file_name (easier join)
|
||||
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
|
||||
row = res.fetchone()
|
||||
if not row: # No row yet, create it
|
||||
self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
|
||||
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
|
||||
row = res.fetchone()
|
||||
elif self.db.schema["version"] == 3:
|
||||
# Separate site, directory, file_name (for merger sites)
|
||||
site_address, directory = re.match("^([^/]*)/(.*)$", directory).groups()
|
||||
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
|
||||
row = res.fetchone()
|
||||
if not row: # No row yet, create it
|
||||
self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name})
|
||||
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
|
||||
row = res.fetchone()
|
||||
else:
|
||||
raise Exception("Dbschema version %s not supported" % self.db.schema.get("version"))
|
||||
return row
|
||||
|
||||
def close(self):
|
||||
self.cursor.close()
|
||||
pass
|
||||
|
|
46
src/Db/DbQuery.py
Normal file
46
src/Db/DbQuery.py
Normal file
|
@ -0,0 +1,46 @@
|
|||
import re
|
||||
|
||||
|
||||
# Parse and modify sql queries
|
||||
class DbQuery:
|
||||
def __init__(self, query):
|
||||
self.setQuery(query.strip())
|
||||
|
||||
# Split main parts of query
|
||||
def parseParts(self, query):
|
||||
parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
|
||||
parts = [_f for _f in parts if _f] # Remove empty parts
|
||||
parts = [s.strip() for s in parts] # Remove whitespace
|
||||
return dict(list(zip(parts[0::2], parts[1::2])))
|
||||
|
||||
# Parse selected fields SELECT ... FROM
|
||||
def parseFields(self, query_select):
|
||||
fields = re.findall("([^,]+) AS ([^,]+)", query_select)
|
||||
return {key: val.strip() for val, key in fields}
|
||||
|
||||
# Parse query conditions WHERE ...
|
||||
def parseWheres(self, query_where):
|
||||
if " AND " in query_where:
|
||||
return query_where.split(" AND ")
|
||||
elif query_where:
|
||||
return [query_where]
|
||||
else:
|
||||
return []
|
||||
|
||||
# Set the query
|
||||
def setQuery(self, query):
|
||||
self.parts = self.parseParts(query)
|
||||
self.fields = self.parseFields(self.parts["SELECT"])
|
||||
self.wheres = self.parseWheres(self.parts.get("WHERE", ""))
|
||||
|
||||
# Convert query back to string
|
||||
def __str__(self):
|
||||
query_parts = []
|
||||
for part_name in ["SELECT", "FROM", "WHERE", "ORDER BY", "LIMIT"]:
|
||||
if part_name == "WHERE" and self.wheres:
|
||||
query_parts.append("WHERE")
|
||||
query_parts.append(" AND ".join(self.wheres))
|
||||
elif part_name in self.parts:
|
||||
query_parts.append(part_name)
|
||||
query_parts.append(self.parts[part_name])
|
||||
return "\n".join(query_parts)
|
|
@ -1,2 +0,0 @@
|
|||
from Db import Db
|
||||
from DbCursor import DbCursor
|
|
@ -1,40 +1,172 @@
|
|||
import sys
|
||||
import os
|
||||
import traceback
|
||||
import re
|
||||
from Config import config
|
||||
|
||||
|
||||
# Non fatal exception
|
||||
class Notify(Exception):
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
def __init__(self, message=None):
|
||||
if message:
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
|
||||
# Gevent greenlet.kill accept Exception type
|
||||
def createNotifyType(message):
|
||||
return type("Notify", (Notify, ), {"message": message})
|
||||
|
||||
|
||||
def formatExceptionMessage(err):
|
||||
err_type = err.__class__.__name__
|
||||
if err.args:
|
||||
err_message = err.args[-1]
|
||||
else:
|
||||
err_message = err.__str__()
|
||||
return "%s: %s" % (err_type, err_message)
|
||||
|
||||
|
||||
python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")]
|
||||
python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy
|
||||
|
||||
root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../")
|
||||
root_dir = root_dir.replace("\\", "/")
|
||||
|
||||
|
||||
def formatTraceback(items, limit=None, fold_builtin=True):
|
||||
back = []
|
||||
i = 0
|
||||
prev_file_title = ""
|
||||
is_prev_builtin = False
|
||||
|
||||
for path, line in items:
|
||||
i += 1
|
||||
is_last = i == len(items)
|
||||
path = path.replace("\\", "/")
|
||||
|
||||
if path.startswith("src/gevent/"):
|
||||
file_title = "<gevent>/" + path[len("src/gevent/"):]
|
||||
is_builtin = True
|
||||
is_skippable_builtin = False
|
||||
elif path in ("<frozen importlib._bootstrap>", "<frozen importlib._bootstrap_external>"):
|
||||
file_title = "(importlib)"
|
||||
is_builtin = True
|
||||
is_skippable_builtin = True
|
||||
else:
|
||||
is_skippable_builtin = False
|
||||
for base in python_lib_dirs:
|
||||
if path.startswith(base + "/"):
|
||||
file_title = path[len(base + "/"):]
|
||||
module_name, *tail = file_title.split("/")
|
||||
if module_name.endswith(".py"):
|
||||
module_name = module_name[:-3]
|
||||
file_title = "/".join(["<%s>" % module_name] + tail)
|
||||
is_builtin = True
|
||||
break
|
||||
else:
|
||||
is_builtin = False
|
||||
for base in (root_dir + "/src", root_dir + "/plugins", root_dir):
|
||||
if path.startswith(base + "/"):
|
||||
file_title = path[len(base + "/"):]
|
||||
break
|
||||
else:
|
||||
# For unknown paths, do our best to hide absolute path
|
||||
file_title = path
|
||||
for needle in ("/zeronet/", "/core/"):
|
||||
if needle in file_title.lower():
|
||||
file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):]
|
||||
|
||||
# Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py
|
||||
# E.g.: in 'Db/DbCursor.py' the directory part is unnecessary
|
||||
if not file_title.startswith("/"):
|
||||
prev_part = ""
|
||||
for i, part in enumerate(file_title.split("/") + [""]):
|
||||
if not part.startswith(prev_part):
|
||||
break
|
||||
prev_part = part
|
||||
file_title = "/".join(file_title.split("/")[i - 1:])
|
||||
|
||||
if is_skippable_builtin and fold_builtin:
|
||||
pass
|
||||
elif is_builtin and is_prev_builtin and not is_last and fold_builtin:
|
||||
if back[-1] != "...":
|
||||
back.append("...")
|
||||
else:
|
||||
if file_title == prev_file_title:
|
||||
back.append("%s" % line)
|
||||
else:
|
||||
back.append("%s line %s" % (file_title, line))
|
||||
|
||||
prev_file_title = file_title
|
||||
is_prev_builtin = is_builtin
|
||||
|
||||
if limit and i >= limit:
|
||||
back.append("...")
|
||||
break
|
||||
return back
|
||||
|
||||
|
||||
def formatException(err=None, format="text"):
|
||||
import traceback
|
||||
if type(err) == Notify:
|
||||
return err
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
elif type(err) == tuple and err and err[0] is not None: # Passed trackeback info
|
||||
exc_type, exc_obj, exc_tb = err
|
||||
err = None
|
||||
else: # No trackeback info passed, get latest
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
|
||||
if not err:
|
||||
err = exc_obj.message
|
||||
tb = []
|
||||
for frame in traceback.extract_tb(exc_tb):
|
||||
path, line, function, text = frame
|
||||
file = os.path.split(path)[1]
|
||||
tb.append("%s line %s" % (file, line))
|
||||
if hasattr(err, "message"):
|
||||
err = exc_obj.message
|
||||
else:
|
||||
err = exc_obj
|
||||
|
||||
tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)])
|
||||
if format == "html":
|
||||
return "%s: %s<br><small>%s</small>" % (exc_type.__name__, err, " > ".join(tb))
|
||||
return "%s: %s<br><small class='multiline'>%s</small>" % (repr(err), err, " > ".join(tb))
|
||||
else:
|
||||
return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
|
||||
|
||||
|
||||
def formatStack(limit=None):
|
||||
import inspect
|
||||
tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit)
|
||||
return " > ".join(tb)
|
||||
|
||||
|
||||
# Test if gevent eventloop blocks
|
||||
import logging
|
||||
import gevent
|
||||
import time
|
||||
|
||||
|
||||
num_block = 0
|
||||
|
||||
|
||||
def testBlock():
|
||||
global num_block
|
||||
logging.debug("Gevent block checker started")
|
||||
last_time = time.time()
|
||||
while 1:
|
||||
time.sleep(1)
|
||||
if time.time() - last_time > 1.1:
|
||||
logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1))
|
||||
num_block += 1
|
||||
last_time = time.time()
|
||||
|
||||
|
||||
gevent.spawn(testBlock)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
print 1 / 0
|
||||
except Exception, err:
|
||||
print type(err).__name__
|
||||
print "1/0 error: %s" % formatException(err)
|
||||
print(1 / 0)
|
||||
except Exception as err:
|
||||
print(type(err).__name__)
|
||||
print("1/0 error: %s" % formatException(err))
|
||||
|
||||
def loadJson():
|
||||
json.loads("Errr")
|
||||
|
@ -42,13 +174,13 @@ if __name__ == "__main__":
|
|||
import json
|
||||
try:
|
||||
loadJson()
|
||||
except Exception, err:
|
||||
print err
|
||||
print "Json load error: %s" % formatException(err)
|
||||
except Exception as err:
|
||||
print(err)
|
||||
print("Json load error: %s" % formatException(err))
|
||||
|
||||
try:
|
||||
raise Notify("nothing...")
|
||||
except Exception, err:
|
||||
print "Notify: %s" % formatException(err)
|
||||
except Exception as err:
|
||||
print("Notify: %s" % formatException(err))
|
||||
|
||||
loadJson()
|
||||
|
|
|
@ -1,15 +1,32 @@
|
|||
import sys
|
||||
import logging
|
||||
import signal
|
||||
import importlib
|
||||
|
||||
import gevent
|
||||
import gevent.hub
|
||||
|
||||
from Config import config
|
||||
from . import Debug
|
||||
|
||||
last_error = None
|
||||
|
||||
def shutdown(reason="Unknown"):
|
||||
logging.info("Shutting down (reason: %s)..." % reason)
|
||||
import main
|
||||
if "file_server" in dir(main):
|
||||
try:
|
||||
gevent.spawn(main.file_server.stop)
|
||||
if "ui_server" in dir(main):
|
||||
gevent.spawn(main.ui_server.stop)
|
||||
except Exception as err:
|
||||
print("Proper shutdown error: %s" % err)
|
||||
sys.exit(0)
|
||||
else:
|
||||
sys.exit(0)
|
||||
|
||||
# Store last error, ignore notify, allow manual error logging
|
||||
def handleError(*args):
|
||||
def handleError(*args, **kwargs):
|
||||
global last_error
|
||||
if not args: # Manual called
|
||||
args = sys.exc_info()
|
||||
|
@ -18,47 +35,81 @@ def handleError(*args):
|
|||
silent = False
|
||||
if args[0].__name__ != "Notify":
|
||||
last_error = args
|
||||
if not silent and args[0].__name__ != "Notify":
|
||||
|
||||
if args[0].__name__ == "KeyboardInterrupt":
|
||||
shutdown("Keyboard interrupt")
|
||||
elif not silent and args[0].__name__ != "Notify":
|
||||
logging.exception("Unhandled exception")
|
||||
sys.__excepthook__(*args)
|
||||
if "greenlet.py" not in args[2].tb_frame.f_code.co_filename: # Don't display error twice
|
||||
sys.__excepthook__(*args, **kwargs)
|
||||
|
||||
|
||||
# Ignore notify errors
|
||||
def handleErrorNotify(*args):
|
||||
if args[0].__name__ != "Notify":
|
||||
logging.exception("Unhandled exception")
|
||||
sys.__excepthook__(*args)
|
||||
def handleErrorNotify(*args, **kwargs):
|
||||
err = args[0]
|
||||
if err.__name__ == "KeyboardInterrupt":
|
||||
shutdown("Keyboard interrupt")
|
||||
elif err.__name__ != "Notify":
|
||||
logging.error("Unhandled exception: %s" % Debug.formatException(args))
|
||||
sys.__excepthook__(*args, **kwargs)
|
||||
|
||||
|
||||
OriginalGreenlet = gevent.Greenlet
|
||||
|
||||
|
||||
class ErrorhookedGreenlet(OriginalGreenlet):
|
||||
def _report_error(self, exc_info):
|
||||
sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
|
||||
|
||||
if config.debug:
|
||||
if config.debug: # Keep last error for /Debug
|
||||
sys.excepthook = handleError
|
||||
else:
|
||||
sys.excepthook = handleErrorNotify
|
||||
|
||||
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
|
||||
reload(gevent)
|
||||
|
||||
# Override default error handler to allow silent killing / custom logging
|
||||
if "handle_error" in dir(gevent.hub.Hub):
|
||||
gevent.hub.Hub._original_handle_error = gevent.hub.Hub.handle_error
|
||||
else:
|
||||
logging.debug("gevent.hub.Hub.handle_error not found using old gevent hooks")
|
||||
OriginalGreenlet = gevent.Greenlet
|
||||
class ErrorhookedGreenlet(OriginalGreenlet):
|
||||
def _report_error(self, exc_info):
|
||||
sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
|
||||
|
||||
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
|
||||
importlib.reload(gevent)
|
||||
|
||||
def handleGreenletError(context, type, value, tb):
|
||||
if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool":
|
||||
# Exceptions in ThreadPool will be handled in the main Thread
|
||||
return None
|
||||
|
||||
if isinstance(value, str):
|
||||
# Cython can raise errors where the value is a plain string
|
||||
# e.g., AttributeError, "_semaphore.Semaphore has no attr", <traceback>
|
||||
value = type(value)
|
||||
|
||||
if not issubclass(type, gevent.get_hub().NOT_ERROR):
|
||||
sys.excepthook(type, value, tb)
|
||||
|
||||
gevent.get_hub().handle_error = handleGreenletError
|
||||
|
||||
try:
|
||||
signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM"))
|
||||
except Exception as err:
|
||||
logging.debug("Error setting up SIGTERM watcher: %s" % err)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import time
|
||||
from gevent import monkey
|
||||
monkey.patch_all(thread=False, ssl=False)
|
||||
import Debug
|
||||
from . import Debug
|
||||
|
||||
def sleeper():
|
||||
print "started"
|
||||
def sleeper(num):
|
||||
print("started", num)
|
||||
time.sleep(3)
|
||||
print "stopped"
|
||||
thread1 = gevent.spawn(sleeper)
|
||||
thread2 = gevent.spawn(sleeper)
|
||||
raise Exception("Error")
|
||||
print("stopped", num)
|
||||
thread1 = gevent.spawn(sleeper, 1)
|
||||
thread2 = gevent.spawn(sleeper, 2)
|
||||
time.sleep(1)
|
||||
print "killing..."
|
||||
thread1.throw(Exception("Hello"))
|
||||
thread2.throw(Debug.Notify("Throw"))
|
||||
print "killed"
|
||||
print("killing...")
|
||||
thread1.kill(exception=Debug.Notify("Worker stopped"))
|
||||
#thread2.throw(Debug.Notify("Throw"))
|
||||
print("killed")
|
||||
gevent.joinall([thread1,thread2])
|
||||
|
|
24
src/Debug/DebugLock.py
Normal file
24
src/Debug/DebugLock.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
import time
|
||||
import logging
|
||||
|
||||
import gevent.lock
|
||||
|
||||
from Debug import Debug
|
||||
|
||||
|
||||
class DebugLock:
|
||||
def __init__(self, log_after=0.01, name="Lock"):
|
||||
self.name = name
|
||||
self.log_after = log_after
|
||||
self.lock = gevent.lock.Semaphore(1)
|
||||
self.release = self.lock.release
|
||||
|
||||
def acquire(self, *args, **kwargs):
|
||||
s = time.time()
|
||||
res = self.lock.acquire(*args, **kwargs)
|
||||
time_taken = time.time() - s
|
||||
if time_taken >= self.log_after:
|
||||
logging.debug("%s: Waited %.3fs after called by %s" %
|
||||
(self.name, time_taken, Debug.formatStack())
|
||||
)
|
||||
return res
|
|
@ -3,6 +3,7 @@ import subprocess
|
|||
import re
|
||||
import logging
|
||||
import time
|
||||
import functools
|
||||
|
||||
from Config import config
|
||||
from util import helper
|
||||
|
@ -10,7 +11,17 @@ from util import helper
|
|||
|
||||
# Find files with extension in path
|
||||
def findfiles(path, find_ext):
|
||||
for root, dirs, files in os.walk(path, topdown=False):
|
||||
def sorter(f1, f2):
|
||||
f1 = f1[0].replace(path, "")
|
||||
f2 = f2[0].replace(path, "")
|
||||
if f1 == "":
|
||||
return 1
|
||||
elif f2 == "":
|
||||
return -1
|
||||
else:
|
||||
return helper.cmp(f1.lower(), f2.lower())
|
||||
|
||||
for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)):
|
||||
for file in sorted(files):
|
||||
file_path = root + "/" + file
|
||||
file_ext = file.split(".")[-1]
|
||||
|
@ -34,6 +45,7 @@ def findCoffeescriptCompiler():
|
|||
|
||||
# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
|
||||
def merge(merged_path):
|
||||
merged_path = merged_path.replace("\\", "/")
|
||||
merge_dir = os.path.dirname(merged_path)
|
||||
s = time.time()
|
||||
ext = merged_path.split(".")[-1]
|
||||
|
@ -50,66 +62,69 @@ def merge(merged_path):
|
|||
|
||||
changed = {}
|
||||
for file_path in findfiles(merge_dir, find_ext):
|
||||
if os.path.getmtime(file_path) > merged_mtime:
|
||||
if os.path.getmtime(file_path) > merged_mtime + 1:
|
||||
changed[file_path] = True
|
||||
if not changed:
|
||||
return # Assets not changed, nothing to do
|
||||
|
||||
old_parts = {}
|
||||
if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile
|
||||
merged_old = open(merged_path, "rb").read().decode("utf8")
|
||||
old_parts = {}
|
||||
for match in re.findall("(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
|
||||
old_parts[match[1]] = match[2].strip("\n\r")
|
||||
merged_old = open(merged_path, "rb").read()
|
||||
for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
|
||||
old_parts[match[1].decode()] = match[2].strip(b"\n\r")
|
||||
|
||||
logging.debug("Merging %s (changed: %s, old parts: %s)" % (merged_path, changed, len(old_parts)))
|
||||
# Merge files
|
||||
parts = []
|
||||
s_total = time.time()
|
||||
for file_path in findfiles(merge_dir, find_ext):
|
||||
parts.append("\n\n/* ---- %s ---- */\n\n" % file_path)
|
||||
file_relative_path = file_path.replace(merge_dir + "/", "")
|
||||
parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8"))
|
||||
if file_path.endswith(".coffee"): # Compile coffee script
|
||||
if file_path in changed or file_path not in old_parts: # Only recompile if changed or its not compiled before
|
||||
if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or its not compiled before
|
||||
if config.coffeescript_compiler is None:
|
||||
config.coffeescript_compiler = findCoffeescriptCompiler()
|
||||
if not config.coffeescript_compiler:
|
||||
logging.error("No coffeescript compiler definied, skipping compiling %s" % merged_path)
|
||||
logging.error("No coffeescript compiler defined, skipping compiling %s" % merged_path)
|
||||
return False # No coffeescript compiler, skip this file
|
||||
|
||||
# Replace / with os separators and escape it
|
||||
file_path_escaped = helper.shellquote(os.path.join(*file_path.split("/")))
|
||||
file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep))
|
||||
|
||||
if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file
|
||||
command = config.coffeescript_compiler % file_path_escaped
|
||||
command = config.coffeescript_compiler.replace("%s", file_path_escaped)
|
||||
else: # Put coffeescript file to end
|
||||
command = config.coffeescript_compiler + " " + file_path_escaped
|
||||
|
||||
# Start compiling
|
||||
s = time.time()
|
||||
compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
|
||||
out = compiler.stdout.read().decode("utf8")
|
||||
out = compiler.stdout.read()
|
||||
compiler.wait()
|
||||
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
|
||||
|
||||
# Check errors
|
||||
if out and out.startswith("("): # No error found
|
||||
if out and out.startswith(b"("): # No error found
|
||||
parts.append(out)
|
||||
else: # Put error message in place of source code
|
||||
error = out
|
||||
logging.error("%s Compile error: %s" % (file_path, error))
|
||||
logging.error("%s Compile error: %s" % (file_relative_path, error))
|
||||
error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n")
|
||||
parts.append(
|
||||
"alert('%s compile error: %s');" %
|
||||
(file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
|
||||
b"alert('%s compile error: %s');" %
|
||||
(file_relative_path.encode(), error_escaped)
|
||||
)
|
||||
else: # Not changed use the old_part
|
||||
parts.append(old_parts[file_path])
|
||||
parts.append(old_parts[file_relative_path])
|
||||
else: # Add to parts
|
||||
parts.append(open(file_path).read().decode("utf8"))
|
||||
parts.append(open(file_path, "rb").read())
|
||||
|
||||
merged = u"\n".join(parts)
|
||||
merged = b"\n".join(parts)
|
||||
if ext == "css": # Vendor prefix css
|
||||
from lib.cssvendor import cssvendor
|
||||
merged = cssvendor.prefix(merged)
|
||||
merged = merged.replace("\r", "")
|
||||
open(merged_path, "wb").write(merged.encode("utf8"))
|
||||
merged = merged.replace(b"\r", b"")
|
||||
open(merged_path, "wb").write(merged)
|
||||
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
|
||||
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue