Compare commits
26 commits: v0.8.1 ... py3-latest

SHA1
---
7edbda70f5
290025958f
25c5658b72
2970e3a205
866179f6a3
e8cf14bcf5
fedcf9c1c6
117bcf25d9
a429349cd4
d8e52eaabd
f2ef6e5d9c
dd2bb07cfb
06a9d1e0ff
c354f9e24d
77b4297224
edc5310cd2
99a8409513
3550a64837
85ef28e6fb
1500d9356b
f1a71770fa
f79a73cef4
0731787518
ad95eede10
459b0a73ca
b7870edd2e
19 changed files with 382 additions and 97 deletions

40  .forgejo/workflows/build-on-commit.yml  (new file)

@@ -0,0 +1,40 @@
+name: Build Docker Image on Commit
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - '!' # Exclude tags
+
+jobs:
+  build-and-publish:
+    runs-on: docker-builder
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set REPO_VARS
+        id: repo-url
+        run: |
+          echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
+          echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV
+
+      - name: Login to OCI registry
+        run: |
+          echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin
+
+      - name: Build and push Docker images
+        run: |
+          # Build Docker image with commit SHA
+          docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} .
+          docker push $REPO_HOST/$REPO_PATH:${{ github.sha }}
+
+          # Build Docker image with nightly tag
+          docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly
+          docker push $REPO_HOST/$REPO_PATH:nightly
+
+          # Remove local images to save storage
+          docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }}
+          docker rmi $REPO_HOST/$REPO_PATH:nightly

37  .forgejo/workflows/build-on-tag.yml  (new file)

@@ -0,0 +1,37 @@
+name: Build and Publish Docker Image on Tag
+
+on:
+  push:
+    tags:
+      - '*'
+
+jobs:
+  build-and-publish:
+    runs-on: docker-builder
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set REPO_VARS
+        id: repo-url
+        run: |
+          echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
+          echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV
+
+      - name: Login to OCI registry
+        run: |
+          echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin
+
+      - name: Build and push Docker image
+        run: |
+          TAG=${{ github.ref_name }} # Get the tag name from the context
+          # Build and push multi-platform Docker images
+          docker build -t $REPO_HOST/$REPO_PATH:$TAG --push .
+          # Tag and push latest
+          docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest
+          docker push $REPO_HOST/$REPO_PATH:latest
+
+          # Remove the local image to save storage
+          docker rmi $REPO_HOST/$REPO_PATH:$TAG
+          docker rmi $REPO_HOST/$REPO_PATH:latest

71  .github/workflows/tests.yml  (vendored)

@@ -4,49 +4,48 @@ on: [push, pull_request]
 
 jobs:
   test:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
 
     strategy:
       max-parallel: 16
       matrix:
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9"]
 
     steps:
     - name: Checkout ZeroNet
      uses: actions/checkout@v2
      with:
-        submodules: 'true'
+        submodules: "true"
 
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v1
      with:
        python-version: ${{ matrix.python-version }}
 
    - name: Prepare for installation
      run: |
        python3 -m pip install setuptools
        python3 -m pip install --upgrade pip wheel
        python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium
 
    - name: Install
      run: |
        python3 -m pip install --upgrade -r requirements.txt
        python3 -m pip list
 
    - name: Prepare for tests
      run: |
        openssl version -a
        echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6
 
    - name: Test
      run: |
        catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini
        export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test
        export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test
        export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test
        export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test
        export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test
        export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test
        find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
        find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
        flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/

1  .gitignore  (vendored)

@@ -7,6 +7,7 @@ __pycache__/
 
 # Hidden files
 .*
+!/.forgejo
 !/.github
 !/.gitignore
 !/.travis.yml

29  CHANGELOG.md

@@ -1,3 +1,32 @@
+### ZeroNet 0.9.0 (2023-07-12) Rev4630
+- Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9
+- Add trackers to Config.py for failsafety incase missing trackers.txt
+- Added Proxy links
+- Fix pysha3 dep installation issue
+- FileRequest -> Remove Unnecessary check, Fix error wording
+- Fix Response when site is missing for `actionAs`
+
+
+### ZeroNet 0.8.5 (2023-02-12) Rev4625
+- Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows.
+- default theme-class for missing value in `users.json`.
+- Fetch Stats Plugin changes.
+
+### ZeroNet 0.8.4 (2022-12-12) Rev4620
+- Increase Minimum Site size to 25MB.
+
+### ZeroNet 0.8.3 (2022-12-11) Rev4611
+- main.py -> Fix accessing unassigned varible
+- ContentManager -> Support for multiSig
+- SiteStrorage.py -> Fix accessing unassigned varible
+- ContentManager.py Improve Logging of Valid Signers
+
+### ZeroNet 0.8.2 (2022-11-01) Rev4610
+- Fix Startup Error when plugins dir missing
+- Move trackers to seperate file & Add more trackers
+- Config:: Skip loading missing tracker files
+- Added documentation for getRandomPort fn
+
 ### ZeroNet 0.8.1 (2022-10-01) Rev4600
 - fix readdress loop (cherry-pick previously added commit from conservancy)
 - Remove Patreon badge

18  README.md

@@ -99,6 +99,24 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/
 #### Docker
 There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/
 
+### Online Proxies
+Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one.
+
+#### Official ZNX Proxy :
+
+https://proxy.zeronet.dev/
+
+https://zeronet.dev/
+
+#### From Community
+
+https://0net-preview.com/
+
+https://portal.ngnoid.tv/
+
+https://zeronet.ipfsscan.io/
+
+
 ### Install from source
 
 - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
plugins
2
plugins
|
@ -1 +1 @@
|
||||||
Subproject commit 859ffbf43335796e225525ff01257711485088d9
|
Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb
|
|

requirements.txt

@@ -3,7 +3,7 @@ greenlet==0.4.16; python_version <= "3.6"
 gevent>=20.9.0; python_version >= "3.7"
 msgpack>=0.4.4
 base58
-merkletools
+merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev
 rsa
 PySocks>=1.6.8
 pyasn1

src/Config.py

@@ -13,8 +13,8 @@ import time
 class Config(object):
 
     def __init__(self, argv):
-        self.version = "0.8.1-patch"
-        self.rev = 4601
+        self.version = "0.9.0"
+        self.rev = 4630
         self.argv = argv
         self.action = None
         self.test_parser = None
@@ -36,7 +36,7 @@ class Config(object):
         self.openssl_lib_file = None
         self.openssl_bin_file = None
 
-        self.trackers_file = ["{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"]
+        self.trackers_file = False
         self.createParser()
         self.createArguments()
 
@@ -84,30 +84,10 @@ class Config(object):
         trackers = [
             "http://open.acgnxtracker.com:80/announce", # DE
             "http://tracker.bt4g.com:2095/announce", # Cloudflare
-            "http://vps02.net.orel.ru:80/announce",
             "http://tracker.files.fm:6969/announce",
             "http://t.publictracker.xyz:6969/announce",
             "https://tracker.lilithraws.cf:443/announce",
             "https://tracker.babico.name.tr:443/announce",
-            "https://tr.abiir.top:443/announce",
-            "udp://abufinzio.monocul.us:6969/announce",
-            "udp://vibe.sleepyinternetfun.xyz:1738/announce",
-            "udp://www.torrent.eu.org:451/announce",
-            "udp://tracker.0x.tf:6969/announce",
-            "udp://tracker.zerobytes.xyz:1337/announce",
-            "udp://tracker.opentrackr.org:1337/announce",
-            "udp://tracker.birkenwald.de:6969/announce",
-            "udp://tracker.moeking.me:6969/announce",
-            "udp://ipv6.babico.name.tr:8000/announce",
-            "zero://145.239.95.38:15441",
-            "zero://188.116.183.41:26552",
-            "zero://45.77.23.92:15555",
-            "zero://k5w77dozo3hy5zualyhni6vrh73iwfkaofa64abbilwyhhd3wgenbjqd.onion:15441",
-            "zero://2kcb2fqesyaevc4lntogupa4mkdssth2ypfwczd2ov5a3zo6ytwwbayd.onion:15441",
-            "zero://gugt43coc5tkyrhrc3esf6t6aeycvcqzw7qafxrjpqbwt4ssz5czgzyd.onion:15441",
-            "zero://5vczpwawviukvd7grfhsfxp7a6huz77hlis4fstjkym5kmf4pu7i7myd.onion:15441",
-            "zero://ow7in4ftwsix5klcbdfqvfqjvimqshbm2o75rhtpdnsderrcbx74wbad.onion:15441",
-            "zero://qn65si4gtcwdiliq7vzrwu62qrweoxb6tx2cchwslaervj6szuje66qd.onion:26117",
         ]
         # Platform specific
         if sys.platform.startswith("win"):
@@ -271,7 +251,7 @@ class Config(object):
         self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key')
         self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')
 
-        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
+        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit')
         self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
         self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
         self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
@@ -339,8 +319,7 @@ class Config(object):
 
     def loadTrackersFile(self):
         if not self.trackers_file:
-            return None
+            self.trackers_file = ["trackers.txt", "{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"]
 
         self.trackers = self.arguments.trackers[:]
 
         for trackers_file in self.trackers_file:
@@ -352,6 +331,9 @@ class Config(object):
             else: # Relative to zeronet.py
                 trackers_file_path = self.start_dir + "/" + trackers_file
 
+            if not os.path.exists(trackers_file_path):
+                continue
+
             for line in open(trackers_file_path):
                 tracker = line.strip()
                 if "://" in tracker and tracker not in self.trackers:
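
Taken together, the Config.py hunks above make the bundled tracker list optional: trackers_file now defaults to False, loadTrackersFile() falls back to trackers.txt plus the data-dir copy, and missing files are skipped instead of aborting. A minimal standalone sketch of that fallback, assuming a simplified data_dir substitution and an illustrative function name that is not part of the diff:

import os

def load_trackers(trackers_file, data_dir, start_dir, trackers):
    # Fallback mirroring the new loadTrackersFile() behaviour: default paths when
    # no trackers_file was configured, and silently skip files that do not exist.
    if not trackers_file:
        trackers_file = ["trackers.txt", "%s/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt" % data_dir]
    for path in trackers_file:
        full_path = path if os.path.isabs(path) else os.path.join(start_dir, path)
        if not os.path.exists(full_path):
            continue  # a missing tracker file is no longer an error
        with open(full_path) as f:
            for line in f:
                tracker = line.strip()
                if "://" in tracker and tracker not in trackers:
                    trackers.append(tracker)
    return trackers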

src/Content/ContentManager.py

@@ -727,7 +727,6 @@ class ContentManager(object):
         elif "files_optional" in new_content:
             del new_content["files_optional"]
 
-        new_content["modified"] = int(time.time()) # Add timestamp
         if inner_path == "content.json":
             new_content["zeronet_version"] = config.version
             new_content["signs_required"] = content.get("signs_required", 1)
@@ -747,9 +746,11 @@ class ContentManager(object):
            )
        self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
 
+        signs_required = 1
         if inner_path == "content.json" and privatekey_address == self.site.address:
             # If signing using the root key, then sign the valid signers
-            signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers))
+            signs_required = new_content["signs_required"]
+            signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
             new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey)
             if not new_content["signers_sign"]:
                 self.log.info("Old style address, signers_sign is none")
@@ -757,15 +758,32 @@ class ContentManager(object):
         self.log.info("Signing %s..." % inner_path)
 
         if "signs" in new_content:
-            del(new_content["signs"]) # Delete old signs
+            # del(new_content["signs"]) # Delete old signs
+            old_signs_content = new_content["signs"]
+            del(new_content["signs"])
+        else:
+            old_signs_content = None
         if "sign" in new_content:
             del(new_content["sign"]) # Delete old sign (backward compatibility)
 
-        sign_content = json.dumps(new_content, sort_keys=True)
+        if signs_required > 1:
+            has_valid_sign = False
+            sign_content = json.dumps(new_content, sort_keys=True)
+            for signer in valid_signers:
+                res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]);
+                print(res)
+                if res:
+                    has_valid_sign = has_valid_sign or res
+            if has_valid_sign:
+                new_content["modified"] = content["modified"]
+                sign_content = json.dumps(new_content, sort_keys=True)
+        else:
+            new_content["modified"] = int(time.time()) # Add timestamp
+            sign_content = json.dumps(new_content, sort_keys=True)
         sign = CryptBitcoin.sign(sign_content, privatekey)
         # new_content["signs"] = content.get("signs", {}) # TODO: Multisig
         if sign: # If signing is successful (not an old address)
-            new_content["signs"] = {}
+            new_content["signs"] = old_signs_content or {}
             new_content["signs"][privatekey_address] = sign
 
         self.verifyContent(inner_path, new_content)
@@ -800,7 +818,9 @@ class ContentManager(object):
 
     # Return: The required number of valid signs for the content.json
     def getSignsRequired(self, inner_path, content=None):
-        return 1 # Todo: Multisig
+        if not content:
+            return 1
+        return content.get("signs_required", 1)
 
     def verifyCertSign(self, user_address, user_auth_type, user_name, issuer_address, sign):
         from Crypt import CryptBitcoin
@@ -988,14 +1008,16 @@ class ContentManager(object):
                 if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid
                     raise VerifyError("Invalid cert!")
 
-                valid_signs = 0
+                valid_signs = []
                 for address in valid_signers:
                     if address in signs:
-                        valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
-                    if valid_signs >= signs_required:
+                        result = CryptBitcoin.verify(sign_content, address, signs[address])
+                        if result:
+                            valid_signs.append(address)
+                    if len(valid_signs) >= signs_required:
                         break # Break if we has enough signs
-                if valid_signs < signs_required:
-                    raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required))
+                if len(valid_signs) < signs_required:
+                    raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs))
                 else:
                     return self.verifyContent(inner_path, new_content)
             else: # Old style signing
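
The ContentManager changes above replace the single-signature assumption with a count of verified signers: getSignsRequired() now reads signs_required from the content, and verifyFile() collects the addresses whose signatures verify instead of summing booleans. A minimal sketch of that rule, assuming a verify_fn with the same call shape as CryptBitcoin.verify (the helper itself is illustrative, not the project's API):

def collect_valid_signs(sign_content, signs, valid_signers, signs_required, verify_fn):
    # Gather the addresses whose signature verifies; stop once enough are found.
    valid_signs = []
    for address in valid_signers:
        if address in signs and verify_fn(sign_content, address, signs[address]):
            valid_signs.append(address)
        if len(valid_signs) >= signs_required:
            break
    return valid_signs

# e.g. ok = len(collect_valid_signs(data, signs, signers, 2, CryptBitcoin.verify)) >= 2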

src/Crypt/CryptConnection.py

@@ -127,6 +127,10 @@ class CryptConnectionManager:
            "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA"
        ]
        self.openssl_env['CN'] = random.choice(self.fakedomains)
+        environ = os.environ
+        environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF']
+        environ['RANDFILE'] = self.openssl_env['RANDFILE']
+        environ['CN'] = self.openssl_env['CN']
 
        if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
            self.createSslContexts()
@@ -152,7 +156,7 @@ class CryptConnectionManager:
        self.log.debug("Running: %s" % cmd)
        proc = subprocess.Popen(
            cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=self.openssl_env
+            stdout=subprocess.PIPE, env=environ
        )
        back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
        proc.wait()
@@ -175,7 +179,7 @@ class CryptConnectionManager:
        self.log.debug("Generating certificate key and signing request...")
        proc = subprocess.Popen(
            cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=self.openssl_env
+            stdout=subprocess.PIPE, env=environ
        )
        back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
        proc.wait()
@@ -194,7 +198,7 @@ class CryptConnectionManager:
        self.log.debug("Generating RSA cert...")
        proc = subprocess.Popen(
            cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=self.openssl_env
+            stdout=subprocess.PIPE, env=environ
        )
        back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
        proc.wait()
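
The CryptConnection hunks above stop handing the openssl subprocess only the three OpenSSL variables and instead extend the inherited environment, which is likely the fix the 0.8.5 changelog entry about SSL cert generation on Windows refers to (Windows subprocesses generally also need inherited variables such as PATH and SystemRoot). A sketch of the same idea; unlike the diff, which writes into os.environ directly, this copy-based variant is only illustrative:

import os
import subprocess

def run_openssl(cmd, openssl_env):
    # Start from the full inherited environment, then overlay the OpenSSL settings.
    environ = os.environ.copy()
    environ.update(openssl_env)  # e.g. OPENSSL_CONF, RANDFILE, CN
    proc = subprocess.Popen(
        cmd, shell=True, stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE, env=environ
    )
    back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
    proc.wait()
    return back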

src/File/FileRequest.py

@@ -128,7 +128,7 @@ class FileRequest(object):
                body = peer.getFile(site.address, inner_path).read()
            except Exception as err:
                site.log.debug("Can't download updated file %s: %s" % (inner_path, err))
-                self.response({"error": "File invalid update: Can't download updaed file"})
+                self.response({"error": "Invalid File update: Failed to download updated file content"})
                self.connection.badAction(5)
                return
 
@@ -165,21 +165,19 @@ class FileRequest(object):
 
            site.onFileDone(inner_path) # Trigger filedone
 
-            if inner_path.endswith("content.json"): # Download every changed file from peer
-                peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer
-                # On complete publish to other peers
-                diffs = params.get("diffs", {})
-                site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path)
-
-                # Load new content file and download changed files in new thread
-                def downloader():
-                    site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {}))
-                    del self.server.files_parsing[file_uri]
-
-                gevent.spawn(downloader)
-            else:
-                del self.server.files_parsing[file_uri]
+            # Download every changed file from peer
+            peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer
+            # On complete publish to other peers
+            diffs = params.get("diffs", {})
+            site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path)
+
+            # Load new content file and download changed files in new thread
+            def downloader():
+                site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {}))
+                del self.server.files_parsing[file_uri]
+
+            gevent.spawn(downloader)
 
            self.response({"ok": "Thanks, file %s updated!" % inner_path})
            self.connection.goodAction()

src/Plugin/PluginManager.py

@@ -16,7 +16,9 @@ import plugins
 class PluginManager:
     def __init__(self):
         self.log = logging.getLogger("PluginManager")
-        self.path_plugins = os.path.abspath(os.path.dirname(plugins.__file__))
+        self.path_plugins = None
+        if plugins.__file__:
+            self.path_plugins = os.path.dirname(os.path.abspath(plugins.__file__));
         self.path_installed_plugins = config.data_dir + "/__plugins__"
         self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class)
         self.subclass_order = {} # Record the load order of the plugins, to keep it after reload
@@ -32,7 +34,8 @@ class PluginManager:
 
         self.config.setdefault("builtin", {})
 
-        sys.path.append(os.path.join(os.getcwd(), self.path_plugins))
+        if self.path_plugins:
+            sys.path.append(os.path.join(os.getcwd(), self.path_plugins))
         self.migratePlugins()
 
         if config.debug: # Auto reload Plugins on file change
@@ -127,6 +130,8 @@ class PluginManager:
     def loadPlugins(self):
         all_loaded = True
         s = time.time()
+        if self.path_plugins is None:
+            return
         for plugin in self.listPlugins():
             self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"]))
             if plugin["source"] != "builtin":

src/Site/Site.py

@@ -143,7 +143,7 @@ class Site(object):
 
     # Next size limit based on current size
     def getNextSizeLimit(self):
-        size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
+        size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
         size = self.settings.get("size", 0)
         for size_limit in size_limits:
             if size * 1.2 < size_limit * 1024 * 1024:

src/Site/SiteStorage.py

@@ -463,7 +463,8 @@ class SiteStorage(object):
             else:
                 try:
                     ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-                except Exception as err:
+                except Exception as _err:
+                    err = _err
                     ok = False
 
                 if not ok:

src/Ui/UiRequest.py

@@ -749,7 +749,10 @@ class UiRequest(object):
 
     def replaceHtmlVariables(self, block, path_parts):
         user = self.getCurrentUser()
-        themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
+        if user and user.settings:
+            themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
+        else:
+            themeclass = "theme-light"
         block = block.replace(b"{themeclass}", themeclass.encode("utf8"))
 
         if path_parts:

src/Ui/UiWebsocket.py

@@ -327,7 +327,10 @@ class UiWebsocket(object):
 
     def actionAs(self, to, address, cmd, params=[]):
         if not self.hasSitePermission(address, cmd=cmd):
+            #TODO! Return this as error ?
             return self.response(to, "No permission for site %s" % address)
+        if not self.server.sites.get(address):
+            return self.response(to, {"error": "Site Does Not Exist: %s" % address})
         req_self = copy.copy(self)
         req_self.site = self.server.sites.get(address)
         req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site

src/main.py

@@ -254,8 +254,9 @@ class Actions(object):
            file_correct = site.content_manager.verifyFile(
                content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
            )
-        except Exception as err:
+        except Exception as exp:
            file_correct = False
+            err = exp
 
        if file_correct is True:
            logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
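
The SiteStorage.py and main.py hunks above both rebind the caught exception (err = _err, err = exp) before using it after the except block. That is needed because Python 3 deletes the "except ... as name" binding when the block ends, so the error must be copied to another variable if it is reported later. A minimal illustration:

def check(verify):
    # Keep a reference to the exception so it can be reported after the except block.
    err = None
    try:
        ok = verify()
    except Exception as _err:
        err = _err  # the name _err itself is cleared once this block exits
        ok = False
    if not ok:
        print("Verification failed: %s" % err)
    return ok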

142  trackers.txt  (new file)

@@ -0,0 +1,142 @@
+udp://tracker.opentrackr.org:1337/announce
+udp://explodie.org:6969/announce
+udp://open.stealth.si:80/announce
+http://tracker.ipv6tracker.ru:80/announce
+udp://tracker.birkenwald.de:6969/announce
+udp://tracker.moeking.me:6969/announce
+http://tracker.bt4g.com:2095/announce
+https://tracker.nanoha.org:443/announce
+http://tracker.files.fm:6969/announce
+http://open.acgnxtracker.com:80/announce
+udp://tracker.army:6969/announce
+udp://fe.dealclub.de:6969/announce
+udp://tracker.leech.ie:1337/announce
+udp://tracker.altrosky.nl:6969/announce
+https://tracker.cyber-hub.net:443/announce
+https://tracker.lilithraws.cf:443/announce
+http://bt.okmp3.ru:2710/announce
+udp://vibe.sleepyinternetfun.xyz:1738/announce
+udp://open.publictracker.xyz:6969/announce
+udp://tracker.bitsearch.to:1337/announce
+udp://tracker.pomf.se:80/announce
+https://tr.burnabyhighstar.com:443/announce
+https://tr.abiir.top:443/announce
+udp://open.free-tracker.ga:6969/announce
+http://i-p-v-6.tk:6969/announce
+http://open-v6.demonoid.ch:6969/announce
+udp://aarsen.me:6969/announce
+udp://htz3.noho.st:6969/announce
+udp://uploads.gamecoast.net:6969/announce
+udp://mail.zasaonsk.ga:6969/announce
+udp://tracker.joybomb.tw:6969/announce
+udp://tracker.jonaslsa.com:6969/announce
+udp://leefafa.tk:6969/announce
+udp://carr.codes:6969/announce
+https://tr.fuckbitcoin.xyz:443/announce
+udp://tracker.cubonegro.xyz:6969/announce
+udp://tracker.skynetcloud.site:6969/announce
+http://tracker4.itzmx.com:2710/announce
+https://tracker.lilithraws.org:443/announce
+udp://tracker.novaopcj.eu.org:6969/announce
+udp://exodus.desync.com:6969/announce
+http://t.acg.rip:6699/announce
+udp://tracker2.dler.com:80/announce
+udp://6ahddutb1ucc3cp.ru:6969/announce
+udp://tracker.blacksparrowmedia.net:6969/announce
+http://fxtt.ru:80/announce
+udp://tracker.auctor.tv:6969/announce
+udp://torrentclub.space:6969/announce
+udp://zecircle.xyz:6969/announce
+udp://psyco.fr:6969/announce
+udp://fh2.cmp-gaming.com:6969/announce
+udp://new-line.net:6969/announce
+udp://torrents.artixlinux.org:6969/announce
+udp://bt.ktrackers.com:6666/announce
+udp://static.54.161.216.95.clients.your-server.de:6969/announce
+udp://cpe-104-34-3-152.socal.res.rr.com:6969/announce
+http://t.overflow.biz:6969/announce
+udp://tracker1.myporn.club:9337/announce
+udp://moonburrow.club:6969/announce
+udp://tracker.artixlinux.org:6969/announce
+https://t1.hloli.org:443/announce
+udp://bt1.archive.org:6969/announce
+udp://tracker.theoks.net:6969/announce
+udp://tracker.4.babico.name.tr:3131/announce
+udp://buddyfly.top:6969/announce
+udp://ipv6.tracker.harry.lu:80/announce
+udp://public.publictracker.xyz:6969/announce
+udp://mail.artixlinux.org:6969/announce
+udp://v1046920.hosted-by-vdsina.ru:6969/announce
+udp://tracker.cyberia.is:6969/announce
+udp://tracker.beeimg.com:6969/announce
+udp://creative.7o7.cx:6969/announce
+udp://open.dstud.io:6969/announce
+udp://laze.cc:6969/announce
+udp://download.nerocloud.me:6969/announce
+udp://cutscloud.duckdns.org:6969/announce
+https://tracker.jiesen.life:8443/announce
+udp://jutone.com:6969/announce
+udp://wepzone.net:6969/announce
+udp://ipv4.tracker.harry.lu:80/announce
+udp://tracker.tcp.exchange:6969/announce
+udp://f1sh.de:6969/announce
+udp://movies.zsw.ca:6969/announce
+https://tracker1.ctix.cn:443/announce
+udp://sanincode.com:6969/announce
+udp://www.torrent.eu.org:451/announce
+udp://open.4ever.tk:6969/announce
+https://tracker2.ctix.cn:443/announce
+udp://bt2.archive.org:6969/announce
+http://t.nyaatracker.com:80/announce
+udp://yahor.ftp.sh:6969/announce
+udp://tracker.openbtba.com:6969/announce
+udp://tracker.dler.com:6969/announce
+udp://tracker-udp.gbitt.info:80/announce
+udp://tracker.srv00.com:6969/announce
+udp://tracker.pimpmyworld.to:6969/announce
+http://tracker.gbitt.info:80/announce
+udp://tracker6.lelux.fi:6969/announce
+http://tracker.vrpnet.org:6969/announce
+http://00.xxtor.com:443/announce
+http://vps02.net.orel.ru:80/announce
+udp://tracker.yangxiaoguozi.cn:6969/announce
+udp://rep-art.ynh.fr:6969/announce
+https://tracker.imgoingto.icu:443/announce
+udp://mirror.aptus.co.tz:6969/announce
+udp://tracker.lelux.fi:6969/announce
+udp://tracker.torrent.eu.org:451/announce
+udp://admin.52ywp.com:6969/announce
+udp://thouvenin.cloud:6969/announce
+http://vps-dd0a0715.vps.ovh.net:6969/announce
+udp://bubu.mapfactor.com:6969/announce
+udp://94-227-232-84.access.telenet.be:6969/announce
+udp://epider.me:6969/announce
+udp://camera.lei001.com:6969/announce
+udp://tamas3.ynh.fr:6969/announce
+https://tracker.tamersunion.org:443/announce
+udp://ftp.pet:2710/announce
+udp://p4p.arenabg.com:1337/announce
+http://tracker.mywaifu.best:6969/announce
+udp://tracker.monitorit4.me:6969/announce
+udp://ipv6.tracker.monitorit4.me:6969/announce
+zero://k5w77dozo3hy5zualyhni6vrh73iwfkaofa64abbilwyhhd3wgenbjqd.onion:15441
+zero://2kcb2fqesyaevc4lntogupa4mkdssth2ypfwczd2ov5a3zo6ytwwbayd.onion:15441
+zero://5vczpwawviukvd7grfhsfxp7a6huz77hlis4fstjkym5kmf4pu7i7myd.onion:15441
+zero://pn4q2zzt2pw4nk7yidxvsxmydko7dfibuzxdswi6gu6ninjpofvqs2id.onion:15441
+zero://6i54dd5th73oelv636ivix6sjnwfgk2qsltnyvswagwphub375t3xcad.onion:15441
+zero://tl74auz4tyqv4bieeclmyoe4uwtoc2dj7fdqv4nc4gl5j2bwg2r26bqd.onion:15441
+zero://wlxav3szbrdhest4j7dib2vgbrd7uj7u7rnuzg22cxbih7yxyg2hsmid.onion:15441
+zero://zy7wttvjtsijt5uwmlar4yguvjc2gppzbdj4v6bujng6xwjmkdg7uvqd.onion:15441
+zero://rlcjomszyitxpwv7kzopmqgzk3bdpsxeull4c3s6goszkk6h2sotfoad.onion:15441
+zero://gugt43coc5tkyrhrc3esf6t6aeycvcqzw7qafxrjpqbwt4ssz5czgzyd.onion:15441
+zero://ow7in4ftwsix5klcbdfqvfqjvimqshbm2o75rhtpdnsderrcbx74wbad.onion:15441
+zero://57hzgtu62yzxqgbvgxs7g3lfck3za4zrda7qkskar3tlak5recxcebyd.onion:15445
+zero://hb6ozikfiaafeuqvgseiik4r46szbpjfu66l67wjinnyv6dtopuwhtqd.onion:15445
+zero://qn65si4gtcwdiliq7vzrwu62qrweoxb6tx2cchwslaervj6szuje66qd.onion:26117
+zero://s3j2s5pjdfesbsmaqx6alsumaxxdxibmhv4eukmqpv3vqj6f627qx5yd.onion:15441
+zero://agufghdtniyfwty3wk55drxxwj2zxgzzo7dbrtje73gmvcpxy4ngs4ad.onion:15441
+zero://kgsvasoakvj4gnjiy7zemu34l3hq46dn5eauqkn76jpowmilci5t2vqd.onion:15445
+zero://dslesoe72bdfwfu4cfqa2wpd4hr3fhlu4zv6mfsjju5xlpmssouv36qd.onion:15441
+zero://f2hnjbggc3c2u2apvxdugirnk6bral54ibdoul3hhvu7pd4fso5fq3yd.onion:15441
+zero://skdeywpgm5xncpxbbr4cuiip6ey4dkambpanog6nruvmef4f3e7o47qd.onion:15441
+zero://tqmo2nffqo4qc5jgmz3me5eri3zpgf3v2zciufzmhnvznjve5c3argad.onion:15441