Compare commits


22 commits

Author      SHA1        Message                                                Date
            7edbda70f5  apply build template                                   2025-04-20 15:59:40 -07:00
                        (CI: Build Docker Image on Commit / build-and-publish (push) failing after 14s)
canewsin    290025958f  v0.9.0(4630)                                           2023-07-12 18:28:32 +05:30
canewsin    25c5658b72  Upgrade GH runner to 20.04                             2023-07-12 18:22:16 +05:30
canewsin    2970e3a205  Fetch plugins changes                                  2023-07-12 01:25:48 +05:30
PramUkesh   866179f6a3  v0.8.6(4626)                                           2023-07-01 04:27:48 +05:30
PramUkesh   e8cf14bcf5  Add trackers to Config.py for failsafety incase missing trackers..txt file  2023-07-01 04:25:41 +05:30
PramUkesh   fedcf9c1c6  Added Proxy links                                      2023-07-01 03:21:32 +05:30
PramUkesh   117bcf25d9  Fix pysha3 dep installation issue                      2023-07-01 02:56:49 +05:30
canewsin    a429349cd4  FileRequest -> Fix error wording                       2023-03-24 02:24:14 +05:30
canewsin    d8e52eaabd  FileRequest -> Remove Unnecessary check                2023-03-24 02:23:16 +05:30
canewsin    f2ef6e5d9c  Fix Response when site is missing for actionAs         2023-02-24 16:56:10 +05:30
canewsin    dd2bb07cfb  v0.8.5(4625)                                           2023-02-12 00:41:38 +05:30
Seto        06a9d1e0ff  Fix openssl error in windows.                          2023-02-10 18:51:36 +05:30
canewsin    c354f9e24d  Use default theme-class for corrupt users.json file where settings key is missing etc; fixes Ui.UiServer Error 500: UiWSGIHandler error  2022-12-25 01:28:16 +05:30
canewsin    77b4297224  Update Stats Plugin                                    2022-12-25 01:26:53 +05:30
canewsin    edc5310cd2  v0.8.4(4620)                                           2022-12-11 05:01:55 +05:30
canewsin    99a8409513  Increase Def Min Site Size to 25MB                     2022-12-11 04:30:31 +05:30
canewsin    3550a64837  v0.8.3(4611)                                           2022-12-11 03:21:22 +05:30
canewsin    85ef28e6fb  ContentManager.py Improve Logging of Valid Signers     2022-12-11 03:21:22 +05:30
canewsin    1500d9356b  SiteStrorage.py -> Fix accessing unassigned varible    2022-12-11 03:21:22 +05:30
canewsin    f1a71770fa  ContentManager -> Support for multiSig                 2022-12-11 03:21:22 +05:30
canewsin    f79a73cef4  main.py -> Fix accessing unassigned varible            2022-12-11 00:51:23 +05:30
16 changed files with 225 additions and 69 deletions


@@ -0,0 +1,40 @@
name: Build Docker Image on Commit

on:
  push:
    branches:
      - main
    tags:
      - '!'  # Exclude tags

jobs:
  build-and-publish:
    runs-on: docker-builder

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set REPO_VARS
        id: repo-url
        run: |
          echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
          echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV

      - name: Login to OCI registry
        run: |
          echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin

      - name: Build and push Docker images
        run: |
          # Build Docker image with commit SHA
          docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} .
          docker push $REPO_HOST/$REPO_PATH:${{ github.sha }}

          # Build Docker image with nightly tag
          docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly
          docker push $REPO_HOST/$REPO_PATH:nightly

          # Remove local images to save storage
          docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }}
          docker rmi $REPO_HOST/$REPO_PATH:nightly


@@ -0,0 +1,37 @@
name: Build and Publish Docker Image on Tag

on:
  push:
    tags:
      - '*'

jobs:
  build-and-publish:
    runs-on: docker-builder

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set REPO_VARS
        id: repo-url
        run: |
          echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV
          echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV

      - name: Login to OCI registry
        run: |
          echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin

      - name: Build and push Docker image
        run: |
          TAG=${{ github.ref_name }}  # Get the tag name from the context

          # Build and push multi-platform Docker images
          docker build -t $REPO_HOST/$REPO_PATH:$TAG --push .

          # Tag and push latest
          docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest
          docker push $REPO_HOST/$REPO_PATH:latest

          # Remove the local image to save storage
          docker rmi $REPO_HOST/$REPO_PATH:$TAG
          docker rmi $REPO_HOST/$REPO_PATH:latest

GitHub Actions test workflow (.github/workflows/)

@@ -4,49 +4,48 @@ on: [push, pull_request]
 jobs:
   test:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     strategy:
       max-parallel: 16
       matrix:
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9"]

     steps:
     - name: Checkout ZeroNet
       uses: actions/checkout@v2
       with:
-        submodules: 'true'
+        submodules: "true"

     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v1
       with:
         python-version: ${{ matrix.python-version }}

     - name: Prepare for installation
       run: |
         python3 -m pip install setuptools
         python3 -m pip install --upgrade pip wheel
         python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium

     - name: Install
       run: |
         python3 -m pip install --upgrade -r requirements.txt
         python3 -m pip list

     - name: Prepare for tests
       run: |
         openssl version -a
         echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6

     - name: Test
       run: |
         catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini
         export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test
         export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test
         export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test
         export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test
         export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test
         export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test
         find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
         find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')"
         flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/

.gitignore

@@ -7,6 +7,7 @@ __pycache__/
 # Hidden files
 .*
+!/.forgejo
 !/.github
 !/.gitignore
 !/.travis.yml

CHANGELOG.md

@@ -1,3 +1,32 @@
+### ZeroNet 0.9.0 (2023-07-12) Rev4630
+ - Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9
+ - Add trackers to Config.py for failsafety incase missing trackers.txt
+ - Added Proxy links
+ - Fix pysha3 dep installation issue
+ - FileRequest -> Remove Unnecessary check, Fix error wording
+ - Fix Response when site is missing for `actionAs`
+
+### ZeroNet 0.8.5 (2023-02-12) Rev4625
+ - Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows.
+ - default theme-class for missing value in `users.json`.
+ - Fetch Stats Plugin changes.
+
+### ZeroNet 0.8.4 (2022-12-12) Rev4620
+ - Increase Minimum Site size to 25MB.
+
+### ZeroNet 0.8.3 (2022-12-11) Rev4611
+ - main.py -> Fix accessing unassigned varible
+ - ContentManager -> Support for multiSig
+ - SiteStrorage.py -> Fix accessing unassigned varible
+ - ContentManager.py Improve Logging of Valid Signers
+
+### ZeroNet 0.8.2 (2022-11-01) Rev4610
+ - Fix Startup Error when plugins dir missing
+ - Move trackers to seperate file & Add more trackers
+ - Config:: Skip loading missing tracker files
+ - Added documentation for getRandomPort fn
+
 ### ZeroNet 0.8.1 (2022-10-01) Rev4600
 - fix readdress loop (cherry-pick previously added commit from conservancy)
 - Remove Patreon badge

README.md

@@ -99,6 +99,24 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/
 #### Docker
 There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/

+### Online Proxies
+Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one.
+
+#### Official ZNX Proxy :
+
+https://proxy.zeronet.dev/
+
+https://zeronet.dev/
+
+#### From Community
+
+https://0net-preview.com/
+
+https://portal.ngnoid.tv/
+
+https://zeronet.ipfsscan.io/
+
 ### Install from source
 - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
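
To try a site through one of these proxies, the common pattern is that the proxy serves each site under its address as a URL path. A minimal sketch under that assumption; the path layout and the ZeroHello address below are assumptions for illustration, not part of this change:

# Hypothetical: fetch a site's index page through a public ZeroNet proxy,
# assuming the proxy exposes sites as https://<proxy>/<site_address>/
import urllib.request

proxy = "https://proxy.zeronet.dev"
site_address = "1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d"  # ZeroHello (assumed example)

with urllib.request.urlopen("%s/%s/" % (proxy, site_address)) as response:
    print(response.status, len(response.read()), "bytes")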

plugins (submodule)
@@ -1 +1 @@
-Subproject commit 859ffbf43335796e225525ff01257711485088d9
+Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb

requirements.txt

@@ -3,7 +3,7 @@ greenlet==0.4.16; python_version <= "3.6"
 gevent>=20.9.0; python_version >= "3.7"
 msgpack>=0.4.4
 base58
-merkletools
+merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev
 rsa
 PySocks>=1.6.8
 pyasn1

src/Config.py

@@ -13,8 +13,8 @@ import time
 class Config(object):

     def __init__(self, argv):
-        self.version = "0.8.2"
-        self.rev = 4610
+        self.version = "0.9.0"
+        self.rev = 4630
         self.argv = argv
         self.action = None
         self.test_parser = None
@@ -81,7 +81,14 @@
     def createArguments(self):
         from Crypt import CryptHash
         access_key_default = CryptHash.random(24, "base64")  # Used to allow restrited plugins when multiuser plugin is enabled
-        trackers = []
+        trackers = [
+            "http://open.acgnxtracker.com:80/announce",  # DE
+            "http://tracker.bt4g.com:2095/announce",  # Cloudflare
+            "http://tracker.files.fm:6969/announce",
+            "http://t.publictracker.xyz:6969/announce",
+            "https://tracker.lilithraws.cf:443/announce",
+            "https://tracker.babico.name.tr:443/announce",
+        ]
         # Platform specific
         if sys.platform.startswith("win"):
             coffeescript = "type %s | tools\\coffee\\coffee.cmd"
@@ -244,7 +251,7 @@
         self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key')
         self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')

-        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
+        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit')
         self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
         self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
         self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
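
Per the commit message ("Add trackers to Config.py for failsafety incase missing trackers.txt"), the list hard-coded above acts as a built-in fallback when the external trackers file cannot be loaded. A minimal sketch of that fallback pattern; the function name and file handling here are illustrative, not the actual Config.py loading code:

# Sketch: fall back to the built-in tracker list when trackers.txt is missing.
import os

DEFAULT_TRACKERS = [
    "http://open.acgnxtracker.com:80/announce",
    "http://tracker.bt4g.com:2095/announce",
    "http://tracker.files.fm:6969/announce",
]

def load_trackers(path="trackers.txt"):
    if not os.path.isfile(path):
        return list(DEFAULT_TRACKERS)  # failsafe: file missing entirely
    with open(path) as f:
        trackers = [line.strip() for line in f if line.strip()]
    return trackers or list(DEFAULT_TRACKERS)  # failsafe: file present but empty

print(load_trackers())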

src/Content/ContentManager.py

@@ -727,7 +727,6 @@ class ContentManager(object):
         elif "files_optional" in new_content:
             del new_content["files_optional"]

-        new_content["modified"] = 1669187325  # This is just for prototyping this needs to be replaced with real value #int(time.time())  # Add timestamp
         if inner_path == "content.json":
             new_content["zeronet_version"] = config.version
             new_content["signs_required"] = content.get("signs_required", 1)
@@ -747,9 +746,11 @@
             )
             self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

+        signs_required = 1
         if inner_path == "content.json" and privatekey_address == self.site.address:
             # If signing using the root key, then sign the valid signers
-            signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers))
+            signs_required = new_content["signs_required"]
+            signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
             new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey)
             if not new_content["signers_sign"]:
                 self.log.info("Old style address, signers_sign is none")
@@ -765,7 +766,20 @@
         if "sign" in new_content:
             del(new_content["sign"])  # Delete old sign (backward compatibility)

-        sign_content = json.dumps(new_content, sort_keys=True)
+        if signs_required > 1:
+            has_valid_sign = False
+            sign_content = json.dumps(new_content, sort_keys=True)
+            for signer in valid_signers:
+                res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]);
+                print(res)
+                if res:
+                    has_valid_sign = has_valid_sign or res
+            if has_valid_sign:
+                new_content["modified"] = content["modified"]
+                sign_content = json.dumps(new_content, sort_keys=True)
+        else:
+            new_content["modified"] = int(time.time())  # Add timestamp
+            sign_content = json.dumps(new_content, sort_keys=True)
         sign = CryptBitcoin.sign(sign_content, privatekey)
         # new_content["signs"] = content.get("signs", {})  # TODO: Multisig
         if sign:  # If signing is successful (not an old address)
@@ -994,14 +1008,16 @@
                 if inner_path != "content.json" and not self.verifyCert(inner_path, new_content):  # Check if cert valid
                     raise VerifyError("Invalid cert!")

-                valid_signs = 0
+                valid_signs = []
                 for address in valid_signers:
                     if address in signs:
-                        valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
-                    if valid_signs >= signs_required:
-                        break  # Break if we has enough signs
-                if valid_signs < signs_required:
-                    raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required))
+                        result = CryptBitcoin.verify(sign_content, address, signs[address])
+                        if result:
+                            valid_signs.append(address)
+                    if len(valid_signs) >= signs_required:
+                        break  # Break if we has enough signs
+                if len(valid_signs) < signs_required:
+                    raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs))
                 else:
                     return self.verifyContent(inner_path, new_content)
             else:  # Old style signing
src/Crypt/CryptConnection.py

@@ -127,6 +127,10 @@ class CryptConnectionManager:
             "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA"
         ]
         self.openssl_env['CN'] = random.choice(self.fakedomains)
+        environ = os.environ
+        environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF']
+        environ['RANDFILE'] = self.openssl_env['RANDFILE']
+        environ['CN'] = self.openssl_env['CN']

         if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
             self.createSslContexts()
@@ -152,7 +156,7 @@
         self.log.debug("Running: %s" % cmd)
         proc = subprocess.Popen(
             cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=self.openssl_env
+            stdout=subprocess.PIPE, env=environ
         )
         back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
         proc.wait()
@@ -175,7 +179,7 @@
         self.log.debug("Generating certificate key and signing request...")
         proc = subprocess.Popen(
             cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=self.openssl_env
+            stdout=subprocess.PIPE, env=environ
         )
         back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
         proc.wait()
@@ -194,7 +198,7 @@
         self.log.debug("Generating RSA cert...")
         proc = subprocess.Popen(
             cmd, shell=True, stderr=subprocess.STDOUT,
-            stdout=subprocess.PIPE, env=self.openssl_env
+            stdout=subprocess.PIPE, env=environ
         )
         back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
         proc.wait()
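
The hunk above stops passing the bare self.openssl_env dict to subprocess and instead sets the OpenSSL variables on top of the inherited process environment; on Windows, openssl.exe commonly fails when system variables such as SystemRoot are missing, which a minimal dict does not carry. A small sketch of the same idea; unlike the committed code it copies os.environ rather than mutating it, which is my assumption about the intent, not what the diff does, and the values shown are placeholders:

# Sketch: build a subprocess env that keeps system variables (SystemRoot,
# PATH, ...) and overlays the OpenSSL-specific settings.
import os

def openssl_environ(openssl_env):
    environ = os.environ.copy()
    environ["OPENSSL_CONF"] = openssl_env["OPENSSL_CONF"]
    environ["RANDFILE"] = openssl_env["RANDFILE"]
    environ["CN"] = openssl_env["CN"]
    return environ

# Usage (hypothetical values):
# subprocess.Popen(cmd, shell=True, env=openssl_environ(
#     {"OPENSSL_CONF": "openssl.cnf", "RANDFILE": "openssl-rand.tmp", "CN": "example.com"}))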

src/File/FileRequest.py

@@ -128,7 +128,7 @@ class FileRequest(object):
                     body = peer.getFile(site.address, inner_path).read()
                 except Exception as err:
                     site.log.debug("Can't download updated file %s: %s" % (inner_path, err))
-                    self.response({"error": "File invalid update: Can't download updaed file"})
+                    self.response({"error": "Invalid File update: Failed to download updated file content"})
                     self.connection.badAction(5)
                     return
@@ -165,21 +165,19 @@
             site.onFileDone(inner_path)  # Trigger filedone

-            if inner_path.endswith("content.json"):  # Download every changed file from peer
+            # Download every changed file from peer
             peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update")  # Add or get peer
             # On complete publish to other peers
             diffs = params.get("diffs", {})
             site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path)

             # Load new content file and download changed files in new thread
             def downloader():
                 site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {}))
                 del self.server.files_parsing[file_uri]

             gevent.spawn(downloader)
-            else:
-                del self.server.files_parsing[file_uri]

             self.response({"ok": "Thanks, file %s updated!" % inner_path})
             self.connection.goodAction()

src/Site/Site.py

@@ -143,7 +143,7 @@ class Site(object):
     # Next size limit based on current size
     def getNextSizeLimit(self):
-        size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
+        size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
         size = self.settings.get("size", 0)
         for size_limit in size_limits:
             if size * 1.2 < size_limit * 1024 * 1024:
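
With the first step raised from 10 MB to 25 MB, the method still walks the list and returns the first limit that leaves roughly 20% headroom over the current site size. A self-contained sketch with a worked call; the final fallback value is an assumption, since the hunk does not show it:

# Sketch of the next-size-limit selection from the hunk above.
def get_next_size_limit(size_bytes):
    size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]  # MB
    for size_limit in size_limits:
        if size_bytes * 1.2 < size_limit * 1024 * 1024:
            return size_limit
    return size_limits[-1]  # fallback (assumed; not shown in the diff)

print(get_next_size_limit(30 * 1024 * 1024))  # 30 MB in use -> 50, since 30 * 1.2 = 36 MB clears 50 but not 25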

src/Site/SiteStorage.py

@@ -463,7 +463,8 @@ class SiteStorage(object):
             else:
                 try:
                     ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-                except Exception as err:
+                except Exception as _err:
+                    err = _err
                     ok = False

             if not ok:
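
The reason this hunk rebinds the exception to a fresh name: in Python 3 the name bound by "except ... as err" is deleted when the except block ends, so any later reference to err outside the block (the likely source of the "accessing unassigned varible" in the commit message) raises UnboundLocalError. A small self-contained demonstration:

# Why the rebinding is needed: Python 3 deletes the "as err" name after the block.
def broken():
    try:
        raise ValueError("boom")
    except Exception as err:
        ok = False
    return err  # UnboundLocalError: 'err' was deleted at the end of the except block

def fixed():
    err = None
    try:
        raise ValueError("boom")
    except Exception as _err:
        err = _err  # keep a reference that survives the except block
        ok = False
    return err

print(repr(fixed()))  # ValueError('boom')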

src/Ui/UiRequest.py

@@ -749,7 +749,10 @@ class UiRequest(object):
     def replaceHtmlVariables(self, block, path_parts):
         user = self.getCurrentUser()
-        themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
+        if user and user.settings:
+            themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
+        else:
+            themeclass = "theme-light"
         block = block.replace(b"{themeclass}", themeclass.encode("utf8"))

         if path_parts:
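
The guard above matters because a corrupt users.json (for example one missing its "settings" key) can leave user.settings unset, and calling .get() on it is what surfaced as the Ui.UiServer Error 500 named in the commit message. A small illustration of the fallback; the User class here is a stand-in, not the real ZeroNet User object:

# Sketch of the theme-class fallback for users with missing settings.
import re

class User:
    def __init__(self, settings=None):
        self.settings = settings  # None when users.json has no "settings" key

def theme_class(user):
    if user and user.settings:
        return "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
    return "theme-light"

print(repr(theme_class(User({"theme": "dark"}))))  # 'theme-dark  ' (left-justified to 6 chars)
print(repr(theme_class(User())))                   # 'theme-light'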

src/Ui/UiWebsocket.py

@@ -327,7 +327,10 @@ class UiWebsocket(object):
     def actionAs(self, to, address, cmd, params=[]):
         if not self.hasSitePermission(address, cmd=cmd):
+            #TODO! Return this as error ?
             return self.response(to, "No permission for site %s" % address)
+        if not self.server.sites.get(address):
+            return self.response(to, {"error": "Site Does Not Exist: %s" % address})
         req_self = copy.copy(self)
         req_self.site = self.server.sites.get(address)
         req_self.hasCmdPermission = self.hasCmdPermission  # Use the same permissions as current site