Compare commits: py3-latest...multi-sig

2 commits:
- b2bd6d7cf5
- 7e475ac5aa

16 changed files with 69 additions and 225 deletions
@@ -1,40 +0,0 @@
-name: Build Docker Image on Commit
-
-on:
-  push:
-    branches:
-      - main
-    tags:
-      - '!' # Exclude tags
-
-jobs:
-  build-and-publish:
-    runs-on: docker-builder
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Set REPO_VARS
-        id: repo-url
-        run: |
-          echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
-          echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV
-
-      - name: Login to OCI registry
-        run: |
-          echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin
-
-      - name: Build and push Docker images
-        run: |
-          # Build Docker image with commit SHA
-          docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} .
-          docker push $REPO_HOST/$REPO_PATH:${{ github.sha }}
-
-          # Build Docker image with nightly tag
-          docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly
-          docker push $REPO_HOST/$REPO_PATH:nightly
-
-          # Remove local images to save storage
-          docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }}
-          docker rmi $REPO_HOST/$REPO_PATH:nightly
@@ -1,37 +0,0 @@
-name: Build and Publish Docker Image on Tag
-
-on:
-  push:
-    tags:
-      - '*'
-
-jobs:
-  build-and-publish:
-    runs-on: docker-builder
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Set REPO_VARS
-        id: repo-url
-        run: |
-          echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
-          echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV
-
-      - name: Login to OCI registry
-        run: |
-          echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin
-
-      - name: Build and push Docker image
-        run: |
-          TAG=${{ github.ref_name }} # Get the tag name from the context
-          # Build and push multi-platform Docker images
-          docker build -t $REPO_HOST/$REPO_PATH:$TAG --push .
-          # Tag and push latest
-          docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest
-          docker push $REPO_HOST/$REPO_PATH:latest
-
-          # Remove the local image to save storage
-          docker rmi $REPO_HOST/$REPO_PATH:$TAG
-          docker rmi $REPO_HOST/$REPO_PATH:latest
.github/workflows/tests.yml (vendored, 7 lines changed)

@@ -4,17 +4,18 @@ on: [push, pull_request]

 jobs:
   test:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-18.04
     strategy:
       max-parallel: 16
       matrix:
-        python-version: ["3.7", "3.8", "3.9"]
+        python-version: [3.6, 3.7, 3.8, 3.9]

     steps:
     - name: Checkout ZeroNet
       uses: actions/checkout@v2
       with:
-        submodules: "true"
+        submodules: 'true'

     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v1
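One subtle difference in this hunk is the quoting of the matrix entries: the base branch quotes the versions as strings, the head branch leaves them bare. A minimal sketch (assuming PyYAML is available; 3.10 is not in this matrix, but it is the usual reason quoting is the safer habit) of how YAML treats the two forms:

```python
# Sketch: quoted version numbers stay strings, bare ones load as floats (assumes PyYAML).
import yaml

quoted = yaml.safe_load('python-version: ["3.7", "3.8", "3.9"]')
bare = yaml.safe_load('python-version: [3.6, 3.7, 3.8, 3.9, 3.10]')

print(quoted["python-version"])  # ['3.7', '3.8', '3.9'] -- strings, passed through as-is
print(bare["python-version"])    # [3.6, 3.7, 3.8, 3.9, 3.1] -- floats; a bare 3.10 collapses to 3.1
```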
.gitignore (vendored, 1 line changed)

@@ -7,7 +7,6 @@ __pycache__/

 # Hidden files
 .*
-!/.forgejo
 !/.github
 !/.gitignore
 !/.travis.yml
CHANGELOG.md (29 lines changed)

@@ -1,32 +1,3 @@
-### ZeroNet 0.9.0 (2023-07-12) Rev4630
-- Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9
-- Add trackers to Config.py for failsafety incase missing trackers.txt
-- Added Proxy links
-- Fix pysha3 dep installation issue
-- FileRequest -> Remove Unnecessary check, Fix error wording
-- Fix Response when site is missing for `actionAs`
-
-
-### ZeroNet 0.8.5 (2023-02-12) Rev4625
-- Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows.
-- default theme-class for missing value in `users.json`.
-- Fetch Stats Plugin changes.
-
-### ZeroNet 0.8.4 (2022-12-12) Rev4620
-- Increase Minimum Site size to 25MB.
-
-### ZeroNet 0.8.3 (2022-12-11) Rev4611
-- main.py -> Fix accessing unassigned varible
-- ContentManager -> Support for multiSig
-- SiteStrorage.py -> Fix accessing unassigned varible
-- ContentManager.py Improve Logging of Valid Signers
-
-### ZeroNet 0.8.2 (2022-11-01) Rev4610
-- Fix Startup Error when plugins dir missing
-- Move trackers to seperate file & Add more trackers
-- Config:: Skip loading missing tracker files
-- Added documentation for getRandomPort fn
-
 ### ZeroNet 0.8.1 (2022-10-01) Rev4600
 - fix readdress loop (cherry-pick previously added commit from conservancy)
 - Remove Patreon badge
|
18
README.md
18
README.md
|
@ -99,24 +99,6 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/
|
||||||
#### Docker
|
#### Docker
|
||||||
There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/
|
There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/
|
||||||
|
|
||||||
### Online Proxies
|
|
||||||
Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one.
|
|
||||||
|
|
||||||
#### Official ZNX Proxy :
|
|
||||||
|
|
||||||
https://proxy.zeronet.dev/
|
|
||||||
|
|
||||||
https://zeronet.dev/
|
|
||||||
|
|
||||||
#### From Community
|
|
||||||
|
|
||||||
https://0net-preview.com/
|
|
||||||
|
|
||||||
https://portal.ngnoid.tv/
|
|
||||||
|
|
||||||
https://zeronet.ipfsscan.io/
|
|
||||||
|
|
||||||
|
|
||||||
### Install from source
|
### Install from source
|
||||||
|
|
||||||
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
|
- `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
|
||||||
|
|
plugins (2 lines changed)

@@ -1 +1 @@
-Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb
+Subproject commit 859ffbf43335796e225525ff01257711485088d9
@@ -3,7 +3,7 @@ greenlet==0.4.16; python_version <= "3.6"
 gevent>=20.9.0; python_version >= "3.7"
 msgpack>=0.4.4
 base58
-merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev
+merkletools
 rsa
 PySocks>=1.6.8
 pyasn1
@@ -13,8 +13,8 @@ import time
 class Config(object):

     def __init__(self, argv):
-        self.version = "0.9.0"
-        self.rev = 4630
+        self.version = "0.8.2"
+        self.rev = 4610
         self.argv = argv
         self.action = None
         self.test_parser = None
@@ -81,14 +81,7 @@ class Config(object):
     def createArguments(self):
         from Crypt import CryptHash
         access_key_default = CryptHash.random(24, "base64")  # Used to allow restrited plugins when multiuser plugin is enabled
-        trackers = [
-            "http://open.acgnxtracker.com:80/announce",  # DE
-            "http://tracker.bt4g.com:2095/announce",  # Cloudflare
-            "http://tracker.files.fm:6969/announce",
-            "http://t.publictracker.xyz:6969/announce",
-            "https://tracker.lilithraws.cf:443/announce",
-            "https://tracker.babico.name.tr:443/announce",
-        ]
+        trackers = []
         # Platform specific
         if sys.platform.startswith("win"):
             coffeescript = "type %s | tools\\coffee\\coffee.cmd"
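With the built-in bootstrap list reduced to `trackers = []`, tracker discovery on this side depends entirely on whatever external tracker file gets loaded later (the removed 0.9.0 changelog entry mentions `trackers.txt` as exactly that failsafe). A rough, hypothetical sketch of that fallback shape; the loader name and file handling here are illustrative, not the actual Config.py code:

```python
# Hypothetical sketch: bootstrapping trackers from an external file when the
# built-in default list is empty. Names are illustrative only.
import os

def load_trackers(path="trackers.txt", defaults=None):
    trackers = list(defaults or [])          # start from the built-in defaults (empty on this branch)
    if os.path.isfile(path):
        with open(path) as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith("#"):
                    trackers.append(line)
    return trackers

print(load_trackers(defaults=[]))  # [] if trackers.txt is missing: no bootstrap announce URLs at all
```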
@@ -251,7 +244,7 @@ class Config(object):
         self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key')
         self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')

-        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit')
+        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
         self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
         self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
         self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
@@ -727,6 +727,7 @@ class ContentManager(object):
         elif "files_optional" in new_content:
             del new_content["files_optional"]

+        new_content["modified"] = 1669187325 # This is just for prototyping this needs to be replaced with real value #int(time.time()) # Add timestamp
         if inner_path == "content.json":
             new_content["zeronet_version"] = config.version
             new_content["signs_required"] = content.get("signs_required", 1)
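The added line pins `modified` to the constant 1669187325 (roughly 2022-11-23 UTC) and carries its own warning that this is prototype-only; the commented-out expression shows what the real value would be. A minimal sketch of the difference:

```python
# Sketch: the prototype pins "modified" to a constant; production code would
# stamp the current Unix time at signing instead.
import time

prototype_modified = 1669187325   # fixed value from the diff (roughly 2022-11-23 UTC)
real_modified = int(time.time())  # what the commented-out int(time.time()) would produce
print(prototype_modified, real_modified)
```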
@@ -746,11 +747,9 @@ class ContentManager(object):
             )
             self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

-        signs_required = 1
         if inner_path == "content.json" and privatekey_address == self.site.address:
             # If signing using the root key, then sign the valid signers
-            signs_required = new_content["signs_required"]
-            signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
+            signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers))
             new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey)
             if not new_content["signers_sign"]:
                 self.log.info("Old style address, signers_sign is none")
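Both sides build the same `signs_required:signer1,signer2,...` payload and sign it with the site's root key; the head branch simply inlines `new_content["signs_required"]` instead of going through a local `signs_required` variable. A small sketch of the string being signed, with made-up addresses (the real code then passes it to `CryptBitcoin.sign(str(signers_data), privatekey)` as shown in the hunk):

```python
# Sketch: the "signers_sign" payload is "<signs_required>:<comma-joined signer addresses>".
# The addresses below are invented for illustration.
signs_required = 2
valid_signers = [
    "1SignerAAAAAAAAAAAAAAAAAAAAAAAAAA",
    "1SignerBBBBBBBBBBBBBBBBBBBBBBBBBB",
]

signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
print(signers_data)  # "2:1SignerAAAAAAAAAAAAAAAAAAAAAAAAAA,1SignerBBBBBBBBBBBBBBBBBBBBBBBBBB"
```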
@@ -766,19 +765,6 @@ class ContentManager(object):
         if "sign" in new_content:
             del(new_content["sign"])  # Delete old sign (backward compatibility)

-        if signs_required > 1:
-            has_valid_sign = False
-            sign_content = json.dumps(new_content, sort_keys=True)
-            for signer in valid_signers:
-                res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]);
-                print(res)
-                if res:
-                    has_valid_sign = has_valid_sign or res
-            if has_valid_sign:
-                new_content["modified"] = content["modified"]
-                sign_content = json.dumps(new_content, sort_keys=True)
-        else:
-            new_content["modified"] = int(time.time())  # Add timestamp
         sign_content = json.dumps(new_content, sort_keys=True)
         sign = CryptBitcoin.sign(sign_content, privatekey)
         # new_content["signs"] = content.get("signs", {}) # TODO: Multisig
@@ -1008,16 +994,14 @@ class ContentManager(object):
                 if inner_path != "content.json" and not self.verifyCert(inner_path, new_content):  # Check if cert valid
                     raise VerifyError("Invalid cert!")

-                valid_signs = []
+                valid_signs = 0
                 for address in valid_signers:
                     if address in signs:
-                        result = CryptBitcoin.verify(sign_content, address, signs[address])
-                        if result:
-                            valid_signs.append(address)
-                    if len(valid_signs) >= signs_required:
+                        valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
+                    if valid_signs >= signs_required:
                         break  # Break if we has enough signs
-                if len(valid_signs) < signs_required:
-                    raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs))
+                if valid_signs < signs_required:
+                    raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required))
                 else:
                     return self.verifyContent(inner_path, new_content)
             else:  # Old style signing
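The verification rewrite replaces the list of matching addresses with a simple counter: the result of `CryptBitcoin.verify()` is effectively boolean, so adding it to an integer counts successful signatures directly, at the cost of no longer being able to report which addresses actually verified. A standalone sketch of the counting pattern, with a stub standing in for `CryptBitcoin.verify` (not a reimplementation of it):

```python
# Sketch: counting valid signatures instead of collecting the addresses that signed.
# "verify" is a stub standing in for CryptBitcoin.verify(sign_content, address, sign).

def verify(sign_content, address, sign):
    return sign == "good"  # stub: pretend only signatures marked "good" verify

def count_valid_signs(sign_content, valid_signers, signs, signs_required):
    valid_signs = 0
    for address in valid_signers:
        if address in signs:
            valid_signs += verify(sign_content, address, signs[address])  # bool coerces to 0/1
        if valid_signs >= signs_required:
            break  # enough signatures, stop early
    return valid_signs

signs = {"addr1": "good", "addr2": "bad", "addr3": "good"}
print(count_valid_signs("payload", ["addr1", "addr2", "addr3"], signs, signs_required=2))  # 2
```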
@@ -127,10 +127,6 @@ class CryptConnectionManager:
             "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA"
         ]
         self.openssl_env['CN'] = random.choice(self.fakedomains)
-        environ = os.environ
-        environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF']
-        environ['RANDFILE'] = self.openssl_env['RANDFILE']
-        environ['CN'] = self.openssl_env['CN']

         if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
             self.createSslContexts()
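The base branch takes `os.environ` itself (the assignment aliases it rather than copying) and sets the OpenSSL variables on it, so the spawned OpenSSL processes in the hunks below inherit the full environment; the head branch passes only `self.openssl_env`, and with `env=` the child gets nothing else, so anything not copied in (PATH and friends) disappears. A small sketch of that difference, using a Python child process instead of OpenSSL:

```python
# Sketch: env= replaces the child's entire environment; it does not merge with os.environ.
import os
import subprocess
import sys

only = subprocess.run(
    [sys.executable, "-c", "import os; print(sorted(os.environ))"],
    env={"OPENSSL_CONF": "/tmp/openssl.cnf"},  # child sees only this (plus whatever the OS injects)
    capture_output=True, text=True,
).stdout

merged_env = dict(os.environ, OPENSSL_CONF="/tmp/openssl.cnf")  # copy-based equivalent of the base branch
merged = subprocess.run(
    [sys.executable, "-c", "import os; print(sorted(os.environ))"],
    env=merged_env,
    capture_output=True, text=True,
).stdout

print(only)    # roughly ['OPENSSL_CONF']: PATH and the rest are gone
print(merged)  # the full environment plus OPENSSL_CONF
```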
@@ -156,7 +152,7 @@ class CryptConnectionManager:
         self.log.debug("Running: %s" % cmd)
         proc = subprocess.Popen(
             cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=environ
+            stdout=subprocess.PIPE, env=self.openssl_env
         )
         back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
         proc.wait()
@@ -179,7 +175,7 @@ class CryptConnectionManager:
         self.log.debug("Generating certificate key and signing request...")
         proc = subprocess.Popen(
             cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=environ
+            stdout=subprocess.PIPE, env=self.openssl_env
         )
         back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
         proc.wait()
@@ -198,7 +194,7 @@ class CryptConnectionManager:
         self.log.debug("Generating RSA cert...")
         proc = subprocess.Popen(
             cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=environ
+            stdout=subprocess.PIPE, env=self.openssl_env
         )
         back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
         proc.wait()
@@ -128,7 +128,7 @@ class FileRequest(object):
                 body = peer.getFile(site.address, inner_path).read()
             except Exception as err:
                 site.log.debug("Can't download updated file %s: %s" % (inner_path, err))
-                self.response({"error": "Invalid File update: Failed to download updated file content"})
+                self.response({"error": "File invalid update: Can't download updaed file"})
                 self.connection.badAction(5)
                 return

@@ -165,7 +165,7 @@ class FileRequest(object):

             site.onFileDone(inner_path)  # Trigger filedone

-            # Download every changed file from peer
+            if inner_path.endswith("content.json"):  # Download every changed file from peer
                 peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update")  # Add or get peer
                 # On complete publish to other peers
                 diffs = params.get("diffs", {})
@@ -177,6 +177,8 @@ class FileRequest(object):
                     del self.server.files_parsing[file_uri]

                 gevent.spawn(downloader)
+            else:
+                del self.server.files_parsing[file_uri]

             self.response({"ok": "Thanks, file %s updated!" % inner_path})
             self.connection.goodAction()
@@ -143,7 +143,7 @@ class Site(object):

     # Next size limit based on current size
     def getNextSizeLimit(self):
-        size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
+        size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
         size = self.settings.get("size", 0)
         for size_limit in size_limits:
            if size * 1.2 < size_limit * 1024 * 1024:
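On the base branch the size ladder loses its 10 MB and 20 MB rungs and starts at 25 MB, matching the `--size_limit` default change above and the removed "Increase Minimum Site size to 25MB" changelog entry; the head branch keeps the older ladder. A standalone sketch of how the visible loop picks the next limit, assuming it returns the first rung that leaves roughly 20% headroom (the return statement is outside the hunk, so that part is inferred):

```python
# Sketch: pick the first size limit (in MB) that leaves ~20% headroom over the current site size.
# The behaviour past the visible diff lines is an assumption.

def get_next_size_limit(size_bytes, size_limits=(25, 50, 100, 200, 500, 1000, 2000, 5000)):
    for size_limit in size_limits:
        if size_bytes * 1.2 < size_limit * 1024 * 1024:
            return size_limit
    return size_limits[-1]

print(get_next_size_limit(0))                 # 25 -- an empty site gets the smallest rung
print(get_next_size_limit(30 * 1024 * 1024))  # 50 -- 30 MB * 1.2 = 36 MB, too big for the 25 MB rung
```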
@@ -463,8 +463,7 @@ class SiteStorage(object):
             else:
                 try:
                     ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-                except Exception as _err:
-                    err = _err
+                except Exception as err:
                     ok = False

                 if not ok:
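The base branch's `except Exception as _err: err = _err` looks redundant but exists because Python 3 unbinds the `as` target when the handler ends; copying it to another name keeps the error available for reporting afterwards. With the head branch's plain `except Exception as err`, `err` is gone once the block finishes (this is the 0.8.3 "accessing unassigned varible" fix in the removed changelog). A minimal sketch of that behaviour:

```python
# Sketch: Python 3 deletes the "except ... as name" binding when the handler exits.

def plain():
    try:
        raise ValueError("boom")
    except Exception as err:
        pass
    return err  # UnboundLocalError: the "as err" binding was deleted at the end of the block

def rebound():
    try:
        raise ValueError("boom")
    except Exception as _err:
        err = _err  # copy to a name that survives the handler
    return err

print(rebound())  # boom
try:
    plain()
except NameError as e:  # UnboundLocalError is a NameError subclass
    print("plain() failed:", e)
```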
@@ -749,10 +749,7 @@ class UiRequest(object):

     def replaceHtmlVariables(self, block, path_parts):
         user = self.getCurrentUser()
-        if user and user.settings:
-            themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
-        else:
-            themeclass = "theme-light"
+        themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
         block = block.replace(b"{themeclass}", themeclass.encode("utf8"))

         if path_parts:
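The head branch drops the `user and user.settings` guard and always reads `user.settings`, so a request with no current user would presumably hit an AttributeError here instead of falling back to `theme-light` (the base branch's guard is the "default theme-class for missing value" fix from the removed 0.8.5 changelog entry). The format expression itself is identical on both sides; a small sketch of what it produces, with a hypothetical settings dict:

```python
# Sketch: the theme class is the sanitized theme name, left-padded to at least 6 characters.
import re

def themeclass_for(settings):
    return "theme-%-6s" % re.sub("[^a-z]", "", settings.get("theme", "light"))

print(repr(themeclass_for({"theme": "dark"})))   # 'theme-dark  ' -- padded with spaces
print(repr(themeclass_for({})))                  # 'theme-light '
print(repr(themeclass_for({"theme": "dark2"})))  # 'theme-dark  ' -- non a-z characters stripped
```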
@@ -327,10 +327,7 @@ class UiWebsocket(object):

     def actionAs(self, to, address, cmd, params=[]):
         if not self.hasSitePermission(address, cmd=cmd):
-            #TODO! Return this as error ?
             return self.response(to, "No permission for site %s" % address)
-        if not self.server.sites.get(address):
-            return self.response(to, {"error": "Site Does Not Exist: %s" % address})
         req_self = copy.copy(self)
         req_self.site = self.server.sites.get(address)
         req_self.hasCmdPermission = self.hasCmdPermission  # Use the same permissions as current site
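On the head branch `actionAs` no longer checks whether the site exists, so `self.server.sites.get(address)` can hand `req_self.site` a `None` that only fails later; the base branch returns the `Site Does Not Exist` error up front (the `actionAs` fix mentioned in the removed 0.9.0 changelog entry). A tiny sketch of the failure mode with a stand-in sites dict:

```python
# Sketch: dict.get() on a missing site address silently yields None, so the error
# only surfaces when something touches the copied request object's .site later.
sites = {"1KnownSiteAddr": object()}  # stand-in for self.server.sites

address = "1MissingSiteAddr"
site = sites.get(address)
print(site)  # None -- no early "Site Does Not Exist" response on the head branch

try:
    site.address  # whatever uses req_self.site next fails here instead
except AttributeError as err:
    print("late failure:", err)
```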